hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
60421c63970c56fb013653048c542f811ebf5d2b
| 187
|
py
|
Python
|
tests/unit/polygon/__init__.py
|
phuntimes/mongoshapes
|
f461c67343c32c6b97af8d67a269b4de492d1d71
|
[
"MIT"
] | 1
|
2020-11-26T05:58:23.000Z
|
2020-11-26T05:58:23.000Z
|
tests/unit/polygon/__init__.py
|
Sean-McVeigh/mongoshapes
|
f461c67343c32c6b97af8d67a269b4de492d1d71
|
[
"MIT"
] | null | null | null |
tests/unit/polygon/__init__.py
|
Sean-McVeigh/mongoshapes
|
f461c67343c32c6b97af8d67a269b4de492d1d71
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from mongoshapes import Polygon as GeoShape
from mongoshapes import PolygonDict as GeoDict
from mongoengine import PolygonField as GeoField
| 26.714286
| 48
| 0.786096
| 25
| 187
| 5.88
| 0.72
| 0.204082
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006211
| 0.139037
| 187
| 6
| 49
| 31.166667
| 0.906832
| 0.224599
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
6060e6cd709be25769e32cfa1a27e9bf639b2c85
| 178
|
py
|
Python
|
djff2html/__init__.py
|
20x48/djff2html
|
03e14cb2a06dd490d299fc1c42039c03fbf507b2
|
[
"MIT"
] | 1
|
2022-02-04T13:57:56.000Z
|
2022-02-04T13:57:56.000Z
|
djff2html/__init__.py
|
20x48/djff2html
|
03e14cb2a06dd490d299fc1c42039c03fbf507b2
|
[
"MIT"
] | null | null | null |
djff2html/__init__.py
|
20x48/djff2html
|
03e14cb2a06dd490d299fc1c42039c03fbf507b2
|
[
"MIT"
] | null | null | null |
try:
from .j import generate, escape
from .v import __version__
except ImportError:
from j import generate, escape
from v import __version__
__author__ = '20x48'
| 22.25
| 35
| 0.724719
| 23
| 178
| 5.086957
| 0.521739
| 0.08547
| 0.188034
| 0.324786
| 0.735043
| 0.735043
| 0.735043
| 0.735043
| 0.735043
| 0
| 0
| 0.028986
| 0.224719
| 178
| 8
| 36
| 22.25
| 0.818841
| 0
| 0
| 0
| 1
| 0
| 0.027933
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.714286
| 0
| 0.714286
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 11
|
6089c284a12604a5b18643dee5d84142b986abf5
| 117
|
py
|
Python
|
restapi/src/models/__init__.py
|
Nyoton/MovieApp
|
48f703f23be544248d9de0004d12fa3c1db0aa42
|
[
"MIT"
] | null | null | null |
restapi/src/models/__init__.py
|
Nyoton/MovieApp
|
48f703f23be544248d9de0004d12fa3c1db0aa42
|
[
"MIT"
] | null | null | null |
restapi/src/models/__init__.py
|
Nyoton/MovieApp
|
48f703f23be544248d9de0004d12fa3c1db0aa42
|
[
"MIT"
] | null | null | null |
from src.models.Database import Database
from src.models.Movie import Movie
from src.models.Category import Category
| 29.25
| 40
| 0.846154
| 18
| 117
| 5.5
| 0.388889
| 0.212121
| 0.393939
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 117
| 3
| 41
| 39
| 0.942857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
60b1bebda8ba9dc00e9cbd84631ac6caaa50c796
| 7,551
|
py
|
Python
|
tests/test_account.py
|
msumit/qds-sdk-py
|
d3c433f3649a1648e0dd5a6a80a152c9a16f3261
|
[
"Apache-2.0"
] | 1
|
2020-04-08T00:59:28.000Z
|
2020-04-08T00:59:28.000Z
|
tests/test_account.py
|
msumit/qds-sdk-py
|
d3c433f3649a1648e0dd5a6a80a152c9a16f3261
|
[
"Apache-2.0"
] | null | null | null |
tests/test_account.py
|
msumit/qds-sdk-py
|
d3c433f3649a1648e0dd5a6a80a152c9a16f3261
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import print_function
import sys
import os
if sys.version_info > (2, 7, 0):
import unittest
else:
import unittest2 as unittest
from mock import Mock
sys.path.append(os.path.join(os.path.dirname(__file__), '../bin'))
import qds
from qds_sdk.connection import Connection
from test_base import print_command
from test_base import QdsCliTestCase
class TestAccountCreate(QdsCliTestCase):
def test_all(self):
sys.argv = ['qds.py', 'account', 'create',
'--name', 'new_account',
'--location', 's3://bucket/path',
'--storage-access-key', 'dummy',
'--storage-secret-key', 'dummy',
'--compute-access-key', 'dummy',
'--compute-secret-key', 'dummy',
'--previous-account-plan', 'true',
'--aws-region', 'us-east-1']
print_command()
Connection._api_call = Mock(return_value={})
qds.main()
Connection._api_call.assert_called_with("POST", "account", {'account': {
'name': 'new_account',
'acc_key': 'dummy',
'level': 'free',
'compute_type': 'CUSTOMER_MANAGED',
'aws_region': 'us-east-1',
'storage_type': 'CUSTOMER_MANAGED',
'CacheQuotaSizeInGB': '25',
'secret': 'dummy',
'use_previous_account_plan': 'true',
'compute_secret_key': 'dummy',
'compute_access_key': 'dummy',
'defloc': 's3://bucket/path'}})
def test_no_name(self):
sys.argv = ['qds.py', 'account', 'create',
'--location', 's3://bucket/path',
'--storage-access-key', 'dummy',
'--storage-secret-key', 'dummy',
'--compute-access-key', 'dummy',
'--compute-secret-key', 'dummy',
'--previous-account-plan', 'true',
'--aws-region', 'us-east-1']
print_command()
Connection._api_call = Mock(return_value={})
with self.assertRaises(SystemExit):
qds.main()
def test_no_storage_acc_key(self):
sys.argv = ['qds.py', 'account', 'create',
'--name', 'new_account',
'--location', 's3://bucket/path',
'--storage-secret-key', 'dummy',
'--compute-access-key', 'dummy',
'--compute-secret-key', 'dummy',
'--previous-account-plan', 'true',
'--aws-region', 'us-east-1']
print_command()
Connection._api_call = Mock(return_value={})
with self.assertRaises(SystemExit):
qds.main()
def test_no_storage_secret_key(self):
sys.argv = ['qds.py', 'account', 'create',
'--name', 'new_account',
'--location', 's3://bucket/path',
'--storage-access-key', 'dummy',
'--compute-access-key', 'dummy',
'--compute-secret-key', 'dummy',
'--previous-account-plan', 'true',
'--aws-region', 'us-east-1']
print_command()
Connection._api_call = Mock(return_value={})
with self.assertRaises(SystemExit):
qds.main()
def test_no_aws_region(self):
sys.argv = ['qds.py', 'account', 'create',
'--name', 'new_account',
'--location', 's3://bucket/path',
'--storage-access-key', 'dummy',
'--storage-secret-key', 'dummy',
'--compute-access-key', 'dummy',
'--compute-secret-key', 'dummy',
'--previous-account-plan', 'true']
print_command()
Connection._api_call = Mock(return_value={})
with self.assertRaises(SystemExit):
qds.main()
def test_invalid_region(self):
sys.argv = ['qds.py', 'account', 'create',
'--name', 'new_account',
'--location', 's3://bucket/path',
'--storage-access-key', 'dummy',
'--storage-secret-key', 'dummy',
'--compute-access-key', 'dummy',
'--compute-secret-key', 'dummy',
'--previous-account-plan', 'true',
'--aws-region', 'non-existent']
print_command()
Connection._api_call = Mock(return_value={})
with self.assertRaises(SystemExit):
qds.main()
def test_no_compute_acc_key(self):
sys.argv = ['qds.py', 'account', 'create',
'--name', 'new_account',
'--location', 's3://bucket/path',
'--storage-access-key', 'dummy',
'--storage-secret-key', 'dummy',
'--compute-secret-key', 'dummy',
'--previous-account-plan', 'true',
'--aws-region', 'us-east-1']
print_command()
Connection._api_call = Mock(return_value={})
with self.assertRaises(SystemExit):
qds.main()
def test_no_compute_secret_key(self):
sys.argv = ['qds.py', 'account', 'create',
'--name', 'new_account',
'--location', 's3://bucket/path',
'--storage-access-key', 'dummy',
'--storage-secret-key', 'dummy',
'--compute-access-key', 'dummy',
'--previous-account-plan', 'true',
'--aws-region', 'us-east-1']
print_command()
Connection._api_call = Mock(return_value={})
with self.assertRaises(SystemExit):
qds.main()
def test_no_location(self):
sys.argv = ['qds.py', 'account', 'create',
'--name', 'new_account',
'--storage-access-key', 'dummy',
'--storage-secret-key', 'dummy',
'--compute-access-key', 'dummy',
'--compute-secret-key', 'dummy',
'--previous-account-plan', 'true',
'--aws-region', 'us-east-1']
print_command()
Connection._api_call = Mock(return_value={})
with self.assertRaises(SystemExit):
qds.main()
def test_default_previous_account_plan(self):
sys.argv = ['qds.py', 'account', 'create',
'--name', 'new_account',
'--location', 's3://bucket/path',
'--storage-access-key', 'dummy',
'--storage-secret-key', 'dummy',
'--compute-access-key', 'dummy',
'--compute-secret-key', 'dummy',
'--aws-region', 'us-east-1']
print_command()
Connection._api_call = Mock(return_value={})
qds.main()
Connection._api_call.assert_called_with("POST", "account", {'account': {
'name': 'new_account',
'acc_key': 'dummy',
'level': 'free',
'compute_type': 'CUSTOMER_MANAGED',
'aws_region': 'us-east-1',
'storage_type': 'CUSTOMER_MANAGED',
'CacheQuotaSizeInGB': '25',
'secret': 'dummy',
'use_previous_account_plan': 'false',
'compute_secret_key': 'dummy',
'compute_access_key': 'dummy',
'defloc': 's3://bucket/path'}})
if __name__ == '__main__':
unittest.main()
| 40.164894
| 80
| 0.486558
| 706
| 7,551
| 5.009915
| 0.121813
| 0.094996
| 0.079163
| 0.06531
| 0.880973
| 0.880973
| 0.880973
| 0.872774
| 0.872774
| 0.872774
| 0
| 0.00587
| 0.345782
| 7,551
| 187
| 81
| 40.379679
| 0.710121
| 0
| 0
| 0.825581
| 0
| 0
| 0.323268
| 0.034035
| 0
| 0
| 0
| 0
| 0.05814
| 1
| 0.05814
| false
| 0
| 0.05814
| 0
| 0.122093
| 0.069767
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
60b32ba0539bad43e4cd9a9093cbd011d46e7ecd
| 140
|
py
|
Python
|
loldib/getratings/models/NA/na_gnar/__init__.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_gnar/__init__.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_gnar/__init__.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from .na_gnar_top import *
from .na_gnar_jng import *
from .na_gnar_mid import *
from .na_gnar_bot import *
from .na_gnar_sup import *
| 23.333333
| 27
| 0.75
| 25
| 140
| 3.8
| 0.36
| 0.315789
| 0.526316
| 0.673684
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.178571
| 140
| 5
| 28
| 28
| 0.826087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
60c145e8181bf238044c5864a38356bd71a9fbe2
| 5,581
|
py
|
Python
|
nipy/modalities/fmri/tests/test_iterators.py
|
yarikoptic/NiPy-OLD
|
8759b598ac72d3b9df7414642c7a662ad9c55ece
|
[
"BSD-3-Clause"
] | 1
|
2015-08-22T16:14:45.000Z
|
2015-08-22T16:14:45.000Z
|
nipy/modalities/fmri/tests/test_iterators.py
|
yarikoptic/NiPy-OLD
|
8759b598ac72d3b9df7414642c7a662ad9c55ece
|
[
"BSD-3-Clause"
] | null | null | null |
nipy/modalities/fmri/tests/test_iterators.py
|
yarikoptic/NiPy-OLD
|
8759b598ac72d3b9df7414642c7a662ad9c55ece
|
[
"BSD-3-Clause"
] | null | null | null |
#TODO the iterators are deprecated
import numpy as np
from nipy.testing import *
from nipy.core.api import Image
import nipy.core.reference.coordinate_map as coordinate_map
from nipy.modalities.fmri.api import FmriImageList
"""
Comment out since these are slated for deletion and currently are broken.
Keep for reference until generators are working.
class test_Iterators(TestCase):
def setUp(self):
spacetime = ['time', 'zspace', 'yspace', 'xspace']
im = Image(np.zeros((3,4,5,6)),
coordinate_map = coordinate_map.CoordinateMap.identity((3,4,5,6), spacetime))
self.img = FmriImageList(im)
def test_fmri_parcel(self):
parcelmap = np.zeros(self.img.shape[1:])
parcelmap[0,0,0] = 1
parcelmap[1,1,1] = 1
parcelmap[2,2,2] = 1
parcelmap[1,2,1] = 2
parcelmap[2,3,2] = 2
parcelmap[0,1,0] = 2
parcelseq = (0, 1, 2, 3)
expected = [np.product(self.img.shape[1:]) - 6, 3, 3, 0]
iterator = parcel_iterator(self.img, parcelmap, parcelseq)
for i, slice_ in enumerate(iterator):
self.assertEqual((self.img.shape[0], expected[i],), slice_.shape)
iterator = parcel_iterator(self.img, parcelmap)
for i, slice_ in enumerate(iterator):
self.assertEqual((self.img.shape[0], expected[i],), slice_.shape)
def test_fmri_parcel_write(self):
parcelmap = np.zeros(self.img.shape[1:])
parcelmap[0,0,0] = 1
parcelmap[1,1,1] = 1
parcelmap[2,2,2] = 1
parcelmap[1,2,1] = 2
parcelmap[2,3,2] = 2
parcelmap[0,1,0] = 2
parcelseq = (0, 1, 2, 3)
expected = [np.product(self.img.shape[1:]) - 6, 3, 3, 0]
iterator = parcel_iterator(self.img, parcelmap, parcelseq, mode='w')
for i, slice_ in enumerate(iterator):
value = np.asarray([np.arange(expected[i]) for _ in range(self.img.shape[0])])
slice_.set(value)
iterator = parcel_iterator(self.img, parcelmap, parcelseq)
for i, slice_ in enumerate(iterator):
self.assertEqual((self.img.shape[0], expected[i],), slice_.shape)
assert_equal(slice_, np.asarray([np.arange(expected[i]) for _ in range(self.img.shape[0])]))
iterator = parcel_iterator(self.img, parcelmap, mode='w')
for i, slice_ in enumerate(iterator):
value = np.asarray([np.arange(expected[i]) for _ in range(self.img.shape[0])])
slice_.set(value)
iterator = parcel_iterator(self.img, parcelmap)
for i, slice_ in enumerate(iterator):
self.assertEqual((self.img.shape[0], expected[i],), slice_.shape)
assert_equal(slice_, np.asarray([np.arange(expected[i]) for _ in range(self.img.shape[0])]))
def test_fmri_parcel_copy(self):
parcelmap = np.zeros(self.img.shape[1:])
parcelmap[0,0,0] = 1
parcelmap[1,1,1] = 1
parcelmap[2,2,2] = 1
parcelmap[1,2,1] = 2
parcelmap[2,3,2] = 2
parcelmap[0,1,0] = 2
parcelseq = (0, 1, 2, 3)
expected = [np.product(self.img.shape[1:]) - 6, 3, 3, 0]
iterator = parcel_iterator(self.img, parcelmap, parcelseq)
tmp = FmriImageList(self.img[:] * 1., self.img.coordmap)
new_iterator = iterator.copy(tmp)
for i, slice_ in enumerate(new_iterator):
self.assertEqual((self.img.shape[0], expected[i],), slice_.shape)
iterator = parcel_iterator(self.img, parcelmap)
for i, slice_ in enumerate(new_iterator):
self.assertEqual((self.img.shape[0], expected[i],), slice_.shape)
def test_fmri_sliceparcel(self):
parcelmap = np.asarray([[[0,0,0,1,2,2]]*5,
[[0,0,1,1,2,2]]*5,
[[0,0,0,0,2,2]]*5])
parcelseq = ((1, 2), 0, 2)
iterator = slice_parcel_iterator(self.img, parcelmap, parcelseq)
for i, slice_ in enumerate(iterator):
pm = parcelmap[i]
ps = parcelseq[i]
try:
x = len([n for n in pm.flat if n in ps])
except TypeError:
x = len([n for n in pm.flat if n == ps])
self.assertEqual(x, slice_.shape[1])
self.assertEqual(self.img.shape[0], slice_.shape[0])
def test_fmri_sliceparcel_write(self):
parcelmap = np.asarray([[[0,0,0,1,2,2]]*5,
[[0,0,1,1,2,2]]*5,
[[0,0,0,0,2,2]]*5])
parcelseq = ((1, 2), 0, 2)
iterator = slice_parcel_iterator(self.img, parcelmap, parcelseq, mode='w')
for i, slice_ in enumerate(iterator):
pm = parcelmap[i]
ps = parcelseq[i]
try:
x = len([n for n in pm.flat if n in ps])
except TypeError:
x = len([n for n in pm.flat if n == ps])
value = [i*np.arange(x) for i in range(self.img.shape[0])]
slice_.set(value)
iterator = slice_parcel_iterator(self.img, parcelmap, parcelseq)
for i, slice_ in enumerate(iterator):
pm = parcelmap[i]
ps = parcelseq[i]
try:
x = len([n for n in pm.flat if n in ps])
except TypeError:
x = len([n for n in pm.flat if n == ps])
value = [i*np.arange(x) for i in range(self.img.shape[0])]
self.assertEqual(x, slice_.shape[1])
self.assertEqual(self.img.shape[0], slice_.shape[0])
assert_equal(slice_, value)
"""
| 39.302817
| 104
| 0.567103
| 782
| 5,581
| 3.959079
| 0.121483
| 0.076873
| 0.077519
| 0.058786
| 0.803618
| 0.796512
| 0.796512
| 0.784238
| 0.784238
| 0.784238
| 0
| 0.045271
| 0.291525
| 5,581
| 141
| 105
| 39.58156
| 0.737734
| 0.005913
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007092
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
60dd7d76d1add8fede3b1f9e02e6aaaeed31aa13
| 71,005
|
py
|
Python
|
tests/test_archive_api.py
|
shadchin/Opentok-Python-SDK
|
290fdefdff173858a7cba017047f555d5c6c3096
|
[
"MIT"
] | null | null | null |
tests/test_archive_api.py
|
shadchin/Opentok-Python-SDK
|
290fdefdff173858a7cba017047f555d5c6c3096
|
[
"MIT"
] | null | null | null |
tests/test_archive_api.py
|
shadchin/Opentok-Python-SDK
|
290fdefdff173858a7cba017047f555d5c6c3096
|
[
"MIT"
] | null | null | null |
import unittest
from six import text_type, u, b, PY2, PY3
from nose.tools import raises
from expects import *
import httpretty
from sure import expect
import textwrap
import json
import datetime
import pytz
import requests
from .validate_jwt import validate_jwt_header
from opentok import (
Client,
Archive,
ArchiveList,
OutputModes,
OpenTokException,
__version__,
ArchiveError,
)
class OpenTokArchiveApiTest(unittest.TestCase):
def setUp(self):
self.api_key = u("123456")
self.api_secret = u("1234567890abcdef1234567890abcdef1234567890")
self.session_id = u("SESSIONID")
self.opentok = Client(self.api_key, self.api_secret)
@httpretty.activate
def test_start_archive(self):
httpretty.register_uri(
httpretty.POST,
u("https://api.opentok.com/v2/project/{0}/archive").format(self.api_key),
body=textwrap.dedent(
u(
"""\
{
"createdAt" : 1395183243556,
"duration" : 0,
"id" : "30b3ebf1-ba36-4f5b-8def-6f70d9986fe9",
"name" : "",
"partnerId" : 123456,
"reason" : "",
"sessionId" : "SESSIONID",
"size" : 0,
"status" : "started",
"hasAudio": true,
"hasVideo": true,
"outputMode": "composed",
"url" : null
}
"""
)
),
status=200,
content_type=u("application/json"),
)
archive = self.opentok.start_archive(self.session_id)
validate_jwt_header(self, httpretty.last_request().headers[u("x-opentok-auth")])
expect(httpretty.last_request().headers[u("user-agent")]).to(
contain(u("OpenTok-Python-SDK/") + __version__)
)
expect(httpretty.last_request().headers[u("content-type")]).to(
equal(u("application/json"))
)
# non-deterministic json encoding. have to decode to test it properly
if PY2:
body = json.loads(httpretty.last_request().body)
if PY3:
body = json.loads(httpretty.last_request().body.decode("utf-8"))
expect(body).to(have_key(u("name"), None))
expect(body).to(have_key(u("sessionId"), u("SESSIONID")))
expect(archive).to(be_an(Archive))
expect(archive).to(
have_property(u("id"), u("30b3ebf1-ba36-4f5b-8def-6f70d9986fe9"))
)
expect(archive).to(have_property(u("name"), u("")))
expect(archive).to(have_property(u("status"), u("started")))
expect(archive).to(have_property(u("session_id"), u("SESSIONID")))
expect(archive).to(have_property(u("partner_id"), 123456))
if PY2:
created_at = datetime.datetime.fromtimestamp(1395183243, pytz.UTC)
if PY3:
created_at = datetime.datetime.fromtimestamp(
1395183243, datetime.timezone.utc
)
expect(archive).to(have_property(u("created_at"), created_at))
expect(archive).to(have_property(u("size"), 0))
expect(archive).to(have_property(u("has_audio"), True))
expect(archive).to(have_property(u("has_video"), True))
expect(archive).to(have_property(u("url"), None))
@httpretty.activate
def test_start_archive_with_name(self):
httpretty.register_uri(
httpretty.POST,
u("https://api.opentok.com/v2/project/{0}/archive").format(self.api_key),
body=textwrap.dedent(
u(
"""\
{
"createdAt" : 1395183243556,
"duration" : 0,
"id" : "30b3ebf1-ba36-4f5b-8def-6f70d9986fe9",
"name" : "ARCHIVE NAME",
"partnerId" : 123456,
"reason" : "",
"sessionId" : "SESSIONID",
"size" : 0,
"status" : "started",
"hasAudio": true,
"hasVideo": true,
"outputMode": "composed",
"url" : null
}
"""
)
),
status=200,
content_type=u("application/json"),
)
archive = self.opentok.start_archive(self.session_id, name=u("ARCHIVE NAME"))
validate_jwt_header(self, httpretty.last_request().headers[u("x-opentok-auth")])
expect(httpretty.last_request().headers[u("user-agent")]).to(
contain(u("OpenTok-Python-SDK/") + __version__)
)
expect(httpretty.last_request().headers[u("content-type")]).to(
equal(u("application/json"))
)
# non-deterministic json encoding. have to decode to test it properly
if PY2:
body = json.loads(httpretty.last_request().body)
if PY3:
body = json.loads(httpretty.last_request().body.decode("utf-8"))
expect(body).to(have_key(u("sessionId"), u("SESSIONID")))
expect(body).to(have_key(u("name"), u("ARCHIVE NAME")))
expect(archive).to(be_an(Archive))
expect(archive).to(
have_property(u("id"), u("30b3ebf1-ba36-4f5b-8def-6f70d9986fe9"))
)
expect(archive).to(have_property(u("name"), ("ARCHIVE NAME")))
expect(archive).to(have_property(u("status"), u("started")))
expect(archive).to(have_property(u("session_id"), u("SESSIONID")))
expect(archive).to(have_property(u("partner_id"), 123456))
if PY2:
created_at = datetime.datetime.fromtimestamp(1395183243, pytz.UTC)
if PY3:
created_at = datetime.datetime.fromtimestamp(
1395183243, datetime.timezone.utc
)
expect(archive).to(have_property(u("created_at"), equal(created_at)))
expect(archive).to(have_property(u("size"), equal(0)))
expect(archive).to(have_property(u("duration"), equal(0)))
expect(archive).to(have_property(u("url"), equal(None)))
@httpretty.activate
def test_start_archive_with_640x480_resolution(self):
httpretty.register_uri(
httpretty.POST,
u("https://api.opentok.com/v2/project/{0}/archive").format(self.api_key),
body=textwrap.dedent(
u(
"""\
{
"createdAt" : 1395183243556,
"duration" : 0,
"id" : "30b3ebf1-ba36-4f5b-8def-6f70d9986fe9",
"name" : "ARCHIVE NAME",
"partnerId" : 123456,
"reason" : "",
"sessionId" : "SESSIONID",
"size" : 0,
"status" : "started",
"hasAudio": true,
"hasVideo": true,
"outputMode": "composed",
"url" : null,
"resolution": "640x480"
}
"""
)
),
status=200,
content_type=u("application/json"),
)
archive = self.opentok.start_archive(self.session_id, resolution="640x480")
validate_jwt_header(self, httpretty.last_request().headers[u("x-opentok-auth")])
expect(httpretty.last_request().headers[u("user-agent")]).to(
contain(u("OpenTok-Python-SDK/") + __version__)
)
expect(httpretty.last_request().headers[u("content-type")]).to(
equal(u("application/json"))
)
# non-deterministic json encoding. have to decode to test it properly
if PY2:
body = json.loads(httpretty.last_request().body)
if PY3:
body = json.loads(httpretty.last_request().body.decode("utf-8"))
expect(body).to(have_key(u("sessionId"), u("SESSIONID")))
expect(body).to(have_key(u("resolution"), u("640x480")))
expect(archive).to(be_an(Archive))
expect(archive).to(
have_property(u("id"), u("30b3ebf1-ba36-4f5b-8def-6f70d9986fe9"))
)
expect(archive).to(have_property(u("resolution"), "640x480"))
expect(archive).to(have_property(u("status"), u("started")))
expect(archive).to(have_property(u("session_id"), u("SESSIONID")))
expect(archive).to(have_property(u("partner_id"), 123456))
if PY2:
created_at = datetime.datetime.fromtimestamp(1395183243, pytz.UTC)
if PY3:
created_at = datetime.datetime.fromtimestamp(
1395183243, datetime.timezone.utc
)
expect(archive).to(have_property(u("created_at"), equal(created_at)))
expect(archive).to(have_property(u("size"), equal(0)))
expect(archive).to(have_property(u("duration"), equal(0)))
expect(archive).to(have_property(u("url"), equal(None)))
@httpretty.activate
def test_start_archive_with_1280x720_resolution(self):
httpretty.register_uri(
httpretty.POST,
u("https://api.opentok.com/v2/project/{0}/archive").format(self.api_key),
body=textwrap.dedent(
u(
"""\
{
"createdAt" : 1395183243556,
"duration" : 0,
"id" : "30b3ebf1-ba36-4f5b-8def-6f70d9986fe9",
"name" : "ARCHIVE NAME",
"partnerId" : 123456,
"reason" : "",
"sessionId" : "SESSIONID",
"size" : 0,
"status" : "started",
"hasAudio": true,
"hasVideo": true,
"outputMode": "composed",
"url" : null,
"resolution": "1280x720"
}
"""
)
),
status=200,
content_type=u("application/json"),
)
archive = self.opentok.start_archive(self.session_id, resolution="1280x720")
validate_jwt_header(self, httpretty.last_request().headers[u("x-opentok-auth")])
expect(httpretty.last_request().headers[u("user-agent")]).to(
contain(u("OpenTok-Python-SDK/") + __version__)
)
expect(httpretty.last_request().headers[u("content-type")]).to(
equal(u("application/json"))
)
# non-deterministic json encoding. have to decode to test it properly
if PY2:
body = json.loads(httpretty.last_request().body)
if PY3:
body = json.loads(httpretty.last_request().body.decode("utf-8"))
expect(body).to(have_key(u("sessionId"), u("SESSIONID")))
expect(body).to(have_key(u("resolution"), u("1280x720")))
expect(archive).to(be_an(Archive))
expect(archive).to(
have_property(u("id"), u("30b3ebf1-ba36-4f5b-8def-6f70d9986fe9"))
)
expect(archive).to(have_property(u("resolution"), "1280x720"))
expect(archive).to(have_property(u("status"), u("started")))
expect(archive).to(have_property(u("session_id"), u("SESSIONID")))
expect(archive).to(have_property(u("partner_id"), 123456))
if PY2:
created_at = datetime.datetime.fromtimestamp(1395183243, pytz.UTC)
if PY3:
created_at = datetime.datetime.fromtimestamp(
1395183243, datetime.timezone.utc
)
expect(archive).to(have_property(u("created_at"), equal(created_at)))
expect(archive).to(have_property(u("size"), equal(0)))
expect(archive).to(have_property(u("duration"), equal(0)))
expect(archive).to(have_property(u("url"), equal(None)))
def test_start_archive_individual_and_resolution_throws_error(self):
self.assertRaises(
OpenTokException,
self.opentok.start_archive,
session_id=self.session_id,
output_mode=OutputModes.individual,
resolution="640x480",
)
self.assertRaises(
OpenTokException,
self.opentok.start_archive,
session_id=self.session_id,
output_mode=OutputModes.individual,
resolution="1280x720",
)
@httpretty.activate
def test_start_voice_archive(self):
httpretty.register_uri(
httpretty.POST,
u("https://api.opentok.com/v2/project/{0}/archive").format(self.api_key),
body=textwrap.dedent(
u(
"""\
{
"createdAt" : 1395183243556,
"duration" : 0,
"id" : "30b3ebf1-ba36-4f5b-8def-6f70d9986fe9",
"name" : "ARCHIVE NAME",
"partnerId" : 123456,
"reason" : "",
"sessionId" : "SESSIONID",
"size" : 0,
"status" : "started",
"hasAudio": true,
"hasVideo": false,
"outputMode": "composed",
"url" : null
}
"""
)
),
status=200,
content_type=u("application/json"),
)
archive = self.opentok.start_archive(
self.session_id, name=u("ARCHIVE NAME"), has_video=False
)
validate_jwt_header(self, httpretty.last_request().headers[u("x-opentok-auth")])
expect(httpretty.last_request().headers[u("user-agent")]).to(
contain(u("OpenTok-Python-SDK/") + __version__)
)
expect(httpretty.last_request().headers[u("content-type")]).to(
equal(u("application/json"))
)
# non-deterministic json encoding. have to decode to test it properly
if PY2:
body = json.loads(httpretty.last_request().body)
if PY3:
body = json.loads(httpretty.last_request().body.decode("utf-8"))
expect(body).to(have_key(u("sessionId"), u("SESSIONID")))
expect(body).to(have_key(u("name"), u("ARCHIVE NAME")))
expect(archive).to(be_an(Archive))
expect(archive).to(
have_property(u("id"), u("30b3ebf1-ba36-4f5b-8def-6f70d9986fe9"))
)
expect(archive).to(have_property(u("name"), ("ARCHIVE NAME")))
expect(archive).to(have_property(u("status"), u("started")))
expect(archive).to(have_property(u("session_id"), u("SESSIONID")))
expect(archive).to(have_property(u("partner_id"), 123456))
if PY2:
created_at = datetime.datetime.fromtimestamp(1395183243, pytz.UTC)
if PY3:
created_at = datetime.datetime.fromtimestamp(
1395183243, datetime.timezone.utc
)
expect(archive).to(have_property(u("created_at"), created_at))
expect(archive).to(have_property(u("size"), 0))
expect(archive).to(have_property(u("duration"), 0))
expect(archive).to(have_property(u("has_audio"), True))
expect(archive).to(have_property(u("has_video"), False))
expect(archive).to(have_property(u("url"), None))
@httpretty.activate
def test_start_individual_archive(self):
httpretty.register_uri(
httpretty.POST,
u("https://api.opentok.com/v2/project/{0}/archive").format(self.api_key),
body=textwrap.dedent(
u(
"""\
{
"createdAt" : 1395183243556,
"duration" : 0,
"id" : "30b3ebf1-ba36-4f5b-8def-6f70d9986fe9",
"name" : "ARCHIVE NAME",
"partnerId" : 123456,
"reason" : "",
"sessionId" : "SESSIONID",
"size" : 0,
"status" : "started",
"hasAudio": true,
"hasVideo": true,
"outputMode": "individual",
"url" : null
}
"""
)
),
status=200,
content_type=u("application/json"),
)
archive = self.opentok.start_archive(
self.session_id, name=u("ARCHIVE NAME"), output_mode=OutputModes.individual
)
validate_jwt_header(self, httpretty.last_request().headers[u("x-opentok-auth")])
expect(httpretty.last_request().headers[u("user-agent")]).to(
contain(u("OpenTok-Python-SDK/") + __version__)
)
expect(httpretty.last_request().headers[u("content-type")]).to(
equal(u("application/json"))
)
# non-deterministic json encoding. have to decode to test it properly
if PY2:
body = json.loads(httpretty.last_request().body)
if PY3:
body = json.loads(httpretty.last_request().body.decode("utf-8"))
expect(body).to(have_key(u("sessionId"), u("SESSIONID")))
expect(body).to(have_key(u("name"), u("ARCHIVE NAME")))
expect(archive).to(be_an(Archive))
expect(archive).to(
have_property(u("id"), u("30b3ebf1-ba36-4f5b-8def-6f70d9986fe9"))
)
expect(archive).to(have_property(u("name"), ("ARCHIVE NAME")))
expect(archive).to(have_property(u("status"), u("started")))
expect(archive).to(have_property(u("session_id"), u("SESSIONID")))
expect(archive).to(have_property(u("partner_id"), 123456))
if PY2:
created_at = datetime.datetime.fromtimestamp(1395183243, pytz.UTC)
if PY3:
created_at = datetime.datetime.fromtimestamp(
1395183243, datetime.timezone.utc
)
expect(archive).to(have_property(u("created_at"), created_at))
expect(archive).to(have_property(u("size"), 0))
expect(archive).to(have_property(u("duration"), 0))
expect(archive).to(have_property(u("has_audio"), True))
expect(archive).to(have_property(u("has_video"), True))
expect(archive).to(have_property(u("output_mode"), OutputModes.individual))
expect(archive).to(have_property(u("url"), None))
@httpretty.activate
def test_start_composed_archive(self):
httpretty.register_uri(
httpretty.POST,
u("https://api.opentok.com/v2/project/{0}/archive").format(self.api_key),
body=textwrap.dedent(
u(
"""\
{
"createdAt" : 1395183243556,
"duration" : 0,
"id" : "30b3ebf1-ba36-4f5b-8def-6f70d9986fe9",
"name" : "ARCHIVE NAME",
"partnerId" : 123456,
"reason" : "",
"sessionId" : "SESSIONID",
"size" : 0,
"status" : "started",
"hasAudio": true,
"hasVideo": true,
"outputMode": "composed",
"url" : null
}
"""
)
),
status=200,
content_type=u("application/json"),
)
archive = self.opentok.start_archive(
self.session_id, name=u("ARCHIVE NAME"), output_mode=OutputModes.composed
)
validate_jwt_header(self, httpretty.last_request().headers[u("x-opentok-auth")])
expect(httpretty.last_request().headers[u("user-agent")]).to(
contain(u("OpenTok-Python-SDK/") + __version__)
)
expect(httpretty.last_request().headers[u("content-type")]).to(
equal(u("application/json"))
)
# non-deterministic json encoding. have to decode to test it properly
if PY2:
body = json.loads(httpretty.last_request().body)
if PY3:
body = json.loads(httpretty.last_request().body.decode("utf-8"))
expect(body).to(have_key(u("sessionId"), u("SESSIONID")))
expect(body).to(have_key(u("name"), u("ARCHIVE NAME")))
expect(archive).to(be_an(Archive))
expect(archive).to(
have_property(u("id"), u("30b3ebf1-ba36-4f5b-8def-6f70d9986fe9"))
)
expect(archive).to(have_property(u("name"), ("ARCHIVE NAME")))
expect(archive).to(have_property(u("status"), u("started")))
expect(archive).to(have_property(u("session_id"), u("SESSIONID")))
expect(archive).to(have_property(u("partner_id"), 123456))
if PY2:
created_at = datetime.datetime.fromtimestamp(1395183243, pytz.UTC)
if PY3:
created_at = datetime.datetime.fromtimestamp(
1395183243, datetime.timezone.utc
)
expect(archive).to(have_property(u("created_at"), created_at))
expect(archive).to(have_property(u("size"), 0))
expect(archive).to(have_property(u("duration"), 0))
expect(archive).to(have_property(u("has_audio"), True))
expect(archive).to(have_property(u("has_video"), True))
expect(archive).to(have_property(u("output_mode"), OutputModes.composed))
expect(archive).to(have_property(u("url"), None))
@httpretty.activate
def test_start_archive_with_layout(self):
httpretty.register_uri(
httpretty.POST,
u("https://api.opentok.com/v2/project/{0}/archive").format(self.api_key),
body=textwrap.dedent(
u(
"""\
{
"createdAt" : 1395183243556,
"duration" : 0,
"id" : "30b3ebf1-ba36-4f5b-8def-6f70d9986fe9",
"name" : "",
"partnerId" : 123456,
"reason" : "",
"sessionId" : "SESSIONID",
"size" : 0,
"status" : "started",
"hasAudio": true,
"hasVideo": true,
"outputMode": "composed",
"url" : null
}
"""
)
),
status=200,
content_type=u("application/json"),
)
archive = self.opentok.start_archive(self.session_id, layout={"type": "pip", "screenshareType": "horizontal"})
validate_jwt_header(self, httpretty.last_request().headers[u("x-opentok-auth")])
expect(httpretty.last_request().headers[u("user-agent")]).to(
contain(u("OpenTok-Python-SDK/") + __version__)
)
expect(httpretty.last_request().headers[u("content-type")]).to(
equal(u("application/json"))
)
# non-deterministic json encoding. have to decode to test it properly
if PY2:
body = json.loads(httpretty.last_request().body)
if PY3:
body = json.loads(httpretty.last_request().body.decode("utf-8"))
expect(body).to(have_key(u("sessionId"), u("SESSIONID")))
expect(body).to(have_key(u("name"), None))
expect(body).to(have_key(u("layout"), {"type": "pip", "screenshareType": "horizontal"}))
expect(archive).to(be_an(Archive))
expect(archive).to(
have_property(u("id"), u("30b3ebf1-ba36-4f5b-8def-6f70d9986fe9"))
)
expect(archive).to(have_property(u("name"), ("")))
expect(archive).to(have_property(u("status"), u("started")))
expect(archive).to(have_property(u("session_id"), u("SESSIONID")))
expect(archive).to(have_property(u("partner_id"), 123456))
if PY2:
created_at = datetime.datetime.fromtimestamp(1395183243, pytz.UTC)
if PY3:
created_at = datetime.datetime.fromtimestamp(
1395183243, datetime.timezone.utc
)
expect(archive).to(have_property(u("created_at"), equal(created_at)))
expect(archive).to(have_property(u("size"), equal(0)))
expect(archive).to(have_property(u("duration"), equal(0)))
expect(archive).to(have_property(u("url"), equal(None)))
    @httpretty.activate
    def test_stop_archive(self):
        """stop_archive() POSTs to /archive/{id}/stop and parses the stopped
        archive from the JSON response."""
        archive_id = u("30b3ebf1-ba36-4f5b-8def-6f70d9986fe9")
        httpretty.register_uri(
            httpretty.POST,
            u("https://api.opentok.com/v2/project/{0}/archive/{1}/stop").format(
                self.api_key, archive_id
            ),
            body=textwrap.dedent(
                u(
                    """\
                    {
                        "createdAt" : 1395183243000,
                        "duration" : 0,
                        "id" : "30b3ebf1-ba36-4f5b-8def-6f70d9986fe9",
                        "name" : "",
                        "partnerId" : 123456,
                        "reason" : "",
                        "sessionId" : "SESSIONID",
                        "size" : 0,
                        "status" : "stopped",
                        "hasAudio": true,
                        "hasVideo": true,
                        "outputMode": "composed",
                        "url" : null
                    }
                    """
                )
            ),
            status=200,
            content_type=u("application/json"),
        )
        archive = self.opentok.stop_archive(archive_id)
        # every request must carry a valid JWT and the SDK user-agent
        validate_jwt_header(self, httpretty.last_request().headers[u("x-opentok-auth")])
        expect(httpretty.last_request().headers[u("user-agent")]).to(
            contain(u("OpenTok-Python-SDK/") + __version__)
        )
        expect(httpretty.last_request().headers[u("content-type")]).to(
            equal(u("application/json"))
        )
        expect(archive).to(be_an(Archive))
        expect(archive).to(have_property(u("id"), archive_id))
        expect(archive).to(have_property(u("name"), u("")))
        expect(archive).to(have_property(u("status"), u("stopped")))
        expect(archive).to(have_property(u("session_id"), u("SESSIONID")))
        expect(archive).to(have_property(u("partner_id"), 123456))
        # createdAt in the fixture is milliseconds; the SDK exposes seconds
        if PY2:
            created_at = datetime.datetime.fromtimestamp(1395183243, pytz.UTC)
        if PY3:
            created_at = datetime.datetime.fromtimestamp(
                1395183243, datetime.timezone.utc
            )
        expect(archive).to(have_property(u("created_at"), created_at))
        expect(archive).to(have_property(u("size"), 0))
        expect(archive).to(have_property(u("duration"), 0))
        expect(archive).to(have_property(u("url"), None))
@httpretty.activate
def test_delete_archive(self):
archive_id = u("30b3ebf1-ba36-4f5b-8def-6f70d9986fe9")
httpretty.register_uri(
httpretty.DELETE,
u("https://api.opentok.com/v2/project/{0}/archive/{1}").format(
self.api_key, archive_id
),
body=u(""),
status=204,
)
self.opentok.delete_archive(archive_id)
validate_jwt_header(self, httpretty.last_request().headers[u("x-opentok-auth")])
expect(httpretty.last_request().headers[u("user-agent")]).to(
contain(u("OpenTok-Python-SDK/") + __version__)
)
expect(httpretty.last_request().headers[u("content-type")]).to(
equal(u("application/json"))
)
    @httpretty.activate
    def test_find_archive(self):
        """get_archive() GETs /archive/{id} and parses an available archive,
        including its download URL."""
        archive_id = u("f6e7ee58-d6cf-4a59-896b-6d56b158ec71")
        httpretty.register_uri(
            httpretty.GET,
            u("https://api.opentok.com/v2/project/{0}/archive/{1}").format(
                self.api_key, archive_id
            ),
            body=textwrap.dedent(
                u(
                    """\
                    {
                        "createdAt" : 1395187836000,
                        "duration" : 62,
                        "id" : "f6e7ee58-d6cf-4a59-896b-6d56b158ec71",
                        "name" : "",
                        "partnerId" : 123456,
                        "reason" : "",
                        "sessionId" : "SESSIONID",
                        "size" : 8347554,
                        "status" : "available",
                        "hasAudio": true,
                        "hasVideo": true,
                        "outputMode": "composed",
                        "url" : "http://tokbox.com.archive2.s3.amazonaws.com/123456%2Ff6e7ee58-d6cf-4a59-896b-6d56b158ec71%2Farchive.mp4?Expires=1395194362&AWSAccessKeyId=AKIAI6LQCPIXYVWCQV6Q&Signature=xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
                    }
                    """
                )
            ),
            status=200,
            content_type=u("application/json"),
        )
        archive = self.opentok.get_archive(archive_id)
        # every request must carry a valid JWT and the SDK user-agent
        validate_jwt_header(self, httpretty.last_request().headers[u("x-opentok-auth")])
        expect(httpretty.last_request().headers[u("user-agent")]).to(
            contain(u("OpenTok-Python-SDK/") + __version__)
        )
        expect(httpretty.last_request().headers[u("content-type")]).to(
            equal(u("application/json"))
        )
        expect(archive).to(be_an(Archive))
        expect(archive).to(have_property(u("id"), archive_id))
        expect(archive).to(have_property(u("name"), u("")))
        expect(archive).to(have_property(u("status"), u("available")))
        expect(archive).to(have_property(u("session_id"), u("SESSIONID")))
        expect(archive).to(have_property(u("partner_id"), 123456))
        # createdAt in the fixture is milliseconds; the SDK exposes seconds
        if PY2:
            created_at = datetime.datetime.fromtimestamp(1395187836, pytz.UTC)
        if PY3:
            created_at = datetime.datetime.fromtimestamp(
                1395187836, datetime.timezone.utc
            )
        expect(archive).to(have_property(u("created_at"), created_at))
        expect(archive).to(have_property(u("size"), 8347554))
        expect(archive).to(have_property(u("duration"), 62))
        # the pre-signed S3 URL must round-trip through the model unchanged
        expect(archive).to(
            have_property(
                u("url"),
                u(
                    "http://tokbox.com.archive2.s3.amazonaws.com/123456%2Ff6e7ee58-d6cf-4a59-896b-6d56b158ec71%2Farchive.mp4?Expires=1395194362&AWSAccessKeyId=AKIAI6LQCPIXYVWCQV6Q&Signature=xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
                ),
            )
        )
    @httpretty.activate
    def test_find_archives(self):
        """get_archives() with no filters returns an ArchiveList exposing the
        server-side total count and all returned items."""
        httpretty.register_uri(
            httpretty.GET,
            u("https://api.opentok.com/v2/project/{0}/archive").format(self.api_key),
            body=textwrap.dedent(
                u(
                    """\
                    {
                        "count" : 6,
                        "items" : [ {
                            "createdAt" : 1395187930000,
                            "duration" : 22,
                            "id" : "ef546c5a-4fd7-4e59-ab3d-f1cfb4148d1d",
                            "name" : "",
                            "partnerId" : 123456,
                            "reason" : "",
                            "sessionId" : "SESSIONID",
                            "size" : 2909274,
                            "status" : "available",
                            "hasAudio": true,
                            "hasVideo": true,
                            "outputMode": "composed",
                            "url" : "http://tokbox.com.archive2.s3.amazonaws.com/123456%2Fef546c5a-4fd7-4e59-ab3d-f1cfb4148d1d%2Farchive.mp4?Expires=1395188695&AWSAccessKeyId=AKIAI6LQCPIXYVWCQV6Q&Signature=xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
                        }, {
                            "createdAt" : 1395187910000,
                            "duration" : 14,
                            "id" : "5350f06f-0166-402e-bc27-09ba54948512",
                            "name" : "",
                            "partnerId" : 123456,
                            "reason" : "",
                            "sessionId" : "SESSIONID",
                            "size" : 1952651,
                            "status" : "available",
                            "hasAudio": true,
                            "hasVideo": true,
                            "outputMode": "composed",
                            "url" : "http://tokbox.com.archive2.s3.amazonaws.com/123456%2F5350f06f-0166-402e-bc27-09ba54948512%2Farchive.mp4?Expires=1395188695&AWSAccessKeyId=AKIAI6LQCPIXYVWCQV6Q&Signature=xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
                        }, {
                            "createdAt" : 1395187836000,
                            "duration" : 62,
                            "id" : "f6e7ee58-d6cf-4a59-896b-6d56b158ec71",
                            "name" : "",
                            "partnerId" : 123456,
                            "reason" : "",
                            "sessionId" : "SESSIONID",
                            "size" : 8347554,
                            "status" : "available",
                            "hasAudio": true,
                            "hasVideo": true,
                            "outputMode": "composed",
                            "url" : "http://tokbox.com.archive2.s3.amazonaws.com/123456%2Ff6e7ee58-d6cf-4a59-896b-6d56b158ec71%2Farchive.mp4?Expires=1395188695&AWSAccessKeyId=AKIAI6LQCPIXYVWCQV6Q&Signature=xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
                        }, {
                            "createdAt" : 1395183243000,
                            "duration" : 544,
                            "id" : "30b3ebf1-ba36-4f5b-8def-6f70d9986fe9",
                            "name" : "",
                            "partnerId" : 123456,
                            "reason" : "",
                            "sessionId" : "SESSIONID",
                            "size" : 78499758,
                            "status" : "available",
                            "hasAudio": true,
                            "hasVideo": true,
                            "outputMode": "composed",
                            "url" : "http://tokbox.com.archive2.s3.amazonaws.com/123456%2F30b3ebf1-ba36-4f5b-8def-6f70d9986fe9%2Farchive.mp4?Expires=1395188695&AWSAccessKeyId=AKIAI6LQCPIXYVWCQV6Q&Signature=xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
                        }, {
                            "createdAt" : 1394396753000,
                            "duration" : 24,
                            "id" : "b8f64de1-e218-4091-9544-4cbf369fc238",
                            "name" : "showtime again",
                            "partnerId" : 123456,
                            "reason" : "",
                            "sessionId" : "SESSIONID",
                            "size" : 2227849,
                            "status" : "available",
                            "hasAudio": true,
                            "hasVideo": true,
                            "outputMode": "composed",
                            "url" : "http://tokbox.com.archive2.s3.amazonaws.com/123456%2Fb8f64de1-e218-4091-9544-4cbf369fc238%2Farchive.mp4?Expires=1395188695&AWSAccessKeyId=AKIAI6LQCPIXYVWCQV6Q&Signature=xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
                        }, {
                            "createdAt" : 1394321113000,
                            "duration" : 1294,
                            "id" : "832641bf-5dbf-41a1-ad94-fea213e59a92",
                            "name" : "showtime",
                            "partnerId" : 123456,
                            "reason" : "",
                            "sessionId" : "SESSIONID",
                            "size" : 42165242,
                            "status" : "available",
                            "hasAudio": true,
                            "hasVideo": true,
                            "outputMode": "composed",
                            "url" : "http://tokbox.com.archive2.s3.amazonaws.com/123456%2F832641bf-5dbf-41a1-ad94-fea213e59a92%2Farchive.mp4?Expires=1395188695&AWSAccessKeyId=AKIAI6LQCPIXYVWCQV6Q&Signature=xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
                        } ]
                    }
                    """
                )
            ),
            status=200,
            content_type=u("application/json"),
        )
        archive_list = self.opentok.get_archives()
        # every request must carry a valid JWT and the SDK user-agent
        validate_jwt_header(self, httpretty.last_request().headers[u("x-opentok-auth")])
        expect(httpretty.last_request().headers[u("user-agent")]).to(
            contain(u("OpenTok-Python-SDK/") + __version__)
        )
        expect(httpretty.last_request().headers[u("content-type")]).to(
            equal(u("application/json"))
        )
        expect(archive_list).to(be_an(ArchiveList))
        expect(archive_list).to(have_property(u("count"), 6))
        expect(list(archive_list.items)).to(have_length(6))
        # TODO: we could inspect each item in the list
    @httpretty.activate
    def test_find_archives_with_offset(self):
        """get_archives(offset=3) sends offset as a query parameter; count in
        the response stays the server-side total (6) while items shrink to 3."""
        httpretty.register_uri(
            httpretty.GET,
            u("https://api.opentok.com/v2/project/{0}/archive").format(self.api_key),
            body=textwrap.dedent(
                u(
                    """\
                    {
                        "count" : 6,
                        "items" : [ {
                            "createdAt" : 1395183243000,
                            "duration" : 544,
                            "id" : "30b3ebf1-ba36-4f5b-8def-6f70d9986fe9",
                            "name" : "",
                            "partnerId" : 123456,
                            "reason" : "",
                            "sessionId" : "SESSIONID",
                            "size" : 78499758,
                            "status" : "available",
                            "hasAudio": true,
                            "hasVideo": true,
                            "url" : "http://tokbox.com.archive2.s3.amazonaws.com/123456%2F30b3ebf1-ba36-4f5b-8def-6f70d9986fe9%2Farchive.mp4?Expires=1395188695&AWSAccessKeyId=AKIAI6LQCPIXYVWCQV6Q&Signature=xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
                        }, {
                            "createdAt" : 1394396753000,
                            "duration" : 24,
                            "id" : "b8f64de1-e218-4091-9544-4cbf369fc238",
                            "name" : "showtime again",
                            "partnerId" : 123456,
                            "reason" : "",
                            "sessionId" : "SESSIONID",
                            "size" : 2227849,
                            "status" : "available",
                            "hasAudio": true,
                            "hasVideo": true,
                            "url" : "http://tokbox.com.archive2.s3.amazonaws.com/123456%2Fb8f64de1-e218-4091-9544-4cbf369fc238%2Farchive.mp4?Expires=1395188695&AWSAccessKeyId=AKIAI6LQCPIXYVWCQV6Q&Signature=xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
                        }, {
                            "createdAt" : 1394321113000,
                            "duration" : 1294,
                            "id" : "832641bf-5dbf-41a1-ad94-fea213e59a92",
                            "name" : "showtime",
                            "partnerId" : 123456,
                            "reason" : "",
                            "sessionId" : "SESSIONID",
                            "size" : 42165242,
                            "status" : "available",
                            "hasAudio": true,
                            "hasVideo": true,
                            "url" : "http://tokbox.com.archive2.s3.amazonaws.com/123456%2F832641bf-5dbf-41a1-ad94-fea213e59a92%2Farchive.mp4?Expires=1395188695&AWSAccessKeyId=AKIAI6LQCPIXYVWCQV6Q&Signature=xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
                        } ]
                    }
                    """
                )
            ),
            status=200,
            content_type=u("application/json"),
        )
        archive_list = self.opentok.get_archives(offset=3)
        validate_jwt_header(self, httpretty.last_request().headers[u("x-opentok-auth")])
        expect(httpretty.last_request().headers[u("user-agent")]).to(
            contain(u("OpenTok-Python-SDK/") + __version__)
        )
        expect(httpretty.last_request().headers[u("content-type")]).to(
            equal(u("application/json"))
        )
        # the offset must be forwarded as a query-string parameter
        expect(httpretty.last_request()).to(
            have_property(u("querystring"), {u("offset"): [u("3")]})
        )
        expect(archive_list).to(be_an(ArchiveList))
        expect(archive_list).to(have_property(u("count"), 6))
        expect(list(archive_list.items)).to(have_length(3))
        # TODO: we could inspect each item in the list
    @httpretty.activate
    def test_find_archives_with_count(self):
        """get_archives(count=2) sends count as a query parameter; count in the
        response stays the server-side total (6) while items shrink to 2."""
        httpretty.register_uri(
            httpretty.GET,
            u("https://api.opentok.com/v2/project/{0}/archive").format(self.api_key),
            body=textwrap.dedent(
                u(
                    """\
                    {
                        "count" : 6,
                        "items" : [ {
                            "createdAt" : 1395187930000,
                            "duration" : 22,
                            "id" : "ef546c5a-4fd7-4e59-ab3d-f1cfb4148d1d",
                            "name" : "",
                            "partnerId" : 123456,
                            "reason" : "",
                            "sessionId" : "SESSIONID",
                            "size" : 2909274,
                            "status" : "available",
                            "hasAudio": true,
                            "hasVideo": true,
                            "url" : "http://tokbox.com.archive2.s3.amazonaws.com/123456%2Fef546c5a-4fd7-4e59-ab3d-f1cfb4148d1d%2Farchive.mp4?Expires=1395188695&AWSAccessKeyId=AKIAI6LQCPIXYVWCQV6Q&Signature=xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
                        }, {
                            "createdAt" : 1395187910000,
                            "duration" : 14,
                            "id" : "5350f06f-0166-402e-bc27-09ba54948512",
                            "name" : "",
                            "partnerId" : 123456,
                            "reason" : "",
                            "sessionId" : "SESSIONID",
                            "size" : 1952651,
                            "status" : "available",
                            "hasAudio": true,
                            "hasVideo": true,
                            "url" : "http://tokbox.com.archive2.s3.amazonaws.com/123456%2F5350f06f-0166-402e-bc27-09ba54948512%2Farchive.mp4?Expires=1395188695&AWSAccessKeyId=AKIAI6LQCPIXYVWCQV6Q&Signature=xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
                        } ]
                    }
                    """
                )
            ),
            status=200,
            content_type=u("application/json"),
        )
        archive_list = self.opentok.get_archives(count=2)
        validate_jwt_header(self, httpretty.last_request().headers[u("x-opentok-auth")])
        expect(httpretty.last_request().headers[u("user-agent")]).to(
            contain(u("OpenTok-Python-SDK/") + __version__)
        )
        expect(httpretty.last_request().headers[u("content-type")]).to(
            equal(u("application/json"))
        )
        # the count must be forwarded as a query-string parameter
        expect(httpretty.last_request()).to(
            have_property(u("querystring"), {u("count"): [u("2")]})
        )
        expect(archive_list).to(be_an(ArchiveList))
        expect(archive_list).to(have_property(u("count"), 6))
        expect(list(archive_list.items)).to(have_length(2))
        # TODO: we could inspect each item in the list
    @httpretty.activate
    def test_find_archives_with_offset_and_count(self):
        """get_archives(count=4, offset=2) forwards both paging parameters in
        the query string and returns the 4-item page."""
        httpretty.register_uri(
            httpretty.GET,
            u("https://api.opentok.com/v2/project/{0}/archive").format(self.api_key),
            body=textwrap.dedent(
                u(
                    """\
                    {
                        "count" : 6,
                        "items" : [ {
                            "createdAt" : 1395187836000,
                            "duration" : 62,
                            "id" : "f6e7ee58-d6cf-4a59-896b-6d56b158ec71",
                            "name" : "",
                            "partnerId" : 123456,
                            "reason" : "",
                            "sessionId" : "SESSIONID",
                            "size" : 8347554,
                            "status" : "available",
                            "hasAudio": true,
                            "hasVideo": true,
                            "url" : "http://tokbox.com.archive2.s3.amazonaws.com/123456%2Ff6e7ee58-d6cf-4a59-896b-6d56b158ec71%2Farchive.mp4?Expires=1395188695&AWSAccessKeyId=AKIAI6LQCPIXYVWCQV6Q&Signature=xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
                        }, {
                            "createdAt" : 1395183243000,
                            "duration" : 544,
                            "id" : "30b3ebf1-ba36-4f5b-8def-6f70d9986fe9",
                            "name" : "",
                            "partnerId" : 123456,
                            "reason" : "",
                            "sessionId" : "SESSIONID",
                            "size" : 78499758,
                            "status" : "available",
                            "hasAudio": true,
                            "hasVideo": true,
                            "url" : "http://tokbox.com.archive2.s3.amazonaws.com/123456%2F30b3ebf1-ba36-4f5b-8def-6f70d9986fe9%2Farchive.mp4?Expires=1395188695&AWSAccessKeyId=AKIAI6LQCPIXYVWCQV6Q&Signature=xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
                        }, {
                            "createdAt" : 1394396753000,
                            "duration" : 24,
                            "id" : "b8f64de1-e218-4091-9544-4cbf369fc238",
                            "name" : "showtime again",
                            "partnerId" : 123456,
                            "reason" : "",
                            "sessionId" : "SESSIONID",
                            "size" : 2227849,
                            "status" : "available",
                            "hasAudio": true,
                            "hasVideo": true,
                            "url" : "http://tokbox.com.archive2.s3.amazonaws.com/123456%2Fb8f64de1-e218-4091-9544-4cbf369fc238%2Farchive.mp4?Expires=1395188695&AWSAccessKeyId=AKIAI6LQCPIXYVWCQV6Q&Signature=xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
                        }, {
                            "createdAt" : 1394321113000,
                            "duration" : 1294,
                            "id" : "832641bf-5dbf-41a1-ad94-fea213e59a92",
                            "name" : "showtime",
                            "partnerId" : 123456,
                            "reason" : "",
                            "sessionId" : "SESSIONID",
                            "size" : 42165242,
                            "status" : "available",
                            "hasAudio": true,
                            "hasVideo": true,
                            "url" : "http://tokbox.com.archive2.s3.amazonaws.com/123456%2F832641bf-5dbf-41a1-ad94-fea213e59a92%2Farchive.mp4?Expires=1395188695&AWSAccessKeyId=AKIAI6LQCPIXYVWCQV6Q&Signature=xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
                        } ]
                    }
                    """
                )
            ),
            status=200,
            content_type=u("application/json"),
        )
        archive_list = self.opentok.get_archives(count=4, offset=2)
        validate_jwt_header(self, httpretty.last_request().headers[u("x-opentok-auth")])
        expect(httpretty.last_request().headers[u("user-agent")]).to(
            contain(u("OpenTok-Python-SDK/") + __version__)
        )
        expect(httpretty.last_request().headers[u("content-type")]).to(
            equal(u("application/json"))
        )
        # both paging parameters must appear in the query string
        expect(httpretty.last_request()).to(
            have_property(
                u("querystring"), {u("offset"): [u("2")], u("count"): [u("4")]}
            )
        )
        expect(archive_list).to(be_an(ArchiveList))
        expect(archive_list).to(have_property(u("count"), 6))
        expect(list(archive_list.items)).to(have_length(4))
        # TODO: we could inspect each item in the list
    @httpretty.activate
    def test_find_archives_with_sessionid(self):
        """get_archives(session_id=...) forwards the session filter to the API
        as the sessionId query parameter."""
        httpretty.register_uri(
            httpretty.GET,
            u("https://api.opentok.com/v2/project/{0}/archive").format(self.api_key),
            body=textwrap.dedent(
                u(
                    """\
                    {
                        "count" : 4,
                        "items" : [ {
                            "createdAt" : 1395187836000,
                            "duration" : 62,
                            "id" : "f6e7ee58-d6cf-4a59-896b-6d56b158ec71",
                            "name" : "",
                            "partnerId" : 123456,
                            "reason" : "",
                            "sessionId" : "SESSIONID",
                            "size" : 8347554,
                            "status" : "available",
                            "hasAudio": true,
                            "hasVideo": true,
                            "url" : "http://tokbox.com.archive2.s3.amazonaws.com/123456%2Ff6e7ee58-d6cf-4a59-896b-6d56b158ec71%2Farchive.mp4?Expires=1395188695&AWSAccessKeyId=AKIAI6LQCPIXYVWCQV6Q&Signature=xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
                        }, {
                            "createdAt" : 1395183243000,
                            "duration" : 544,
                            "id" : "30b3ebf1-ba36-4f5b-8def-6f70d9986fe9",
                            "name" : "",
                            "partnerId" : 123456,
                            "reason" : "",
                            "sessionId" : "SESSIONID",
                            "size" : 78499758,
                            "status" : "available",
                            "hasAudio": true,
                            "hasVideo": true,
                            "url" : "http://tokbox.com.archive2.s3.amazonaws.com/123456%2F30b3ebf1-ba36-4f5b-8def-6f70d9986fe9%2Farchive.mp4?Expires=1395188695&AWSAccessKeyId=AKIAI6LQCPIXYVWCQV6Q&Signature=xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
                        }, {
                            "createdAt" : 1394396753000,
                            "duration" : 24,
                            "id" : "b8f64de1-e218-4091-9544-4cbf369fc238",
                            "name" : "showtime again",
                            "partnerId" : 123456,
                            "reason" : "",
                            "sessionId" : "SESSIONID",
                            "size" : 2227849,
                            "status" : "available",
                            "hasAudio": true,
                            "hasVideo": true,
                            "url" : "http://tokbox.com.archive2.s3.amazonaws.com/123456%2Fb8f64de1-e218-4091-9544-4cbf369fc238%2Farchive.mp4?Expires=1395188695&AWSAccessKeyId=AKIAI6LQCPIXYVWCQV6Q&Signature=xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
                        }, {
                            "createdAt" : 1394321113000,
                            "duration" : 1294,
                            "id" : "832641bf-5dbf-41a1-ad94-fea213e59a92",
                            "name" : "showtime",
                            "partnerId" : 123456,
                            "reason" : "",
                            "sessionId" : "SESSIONID",
                            "size" : 42165242,
                            "status" : "available",
                            "hasAudio": true,
                            "hasVideo": true,
                            "url" : "http://tokbox.com.archive2.s3.amazonaws.com/123456%2F832641bf-5dbf-41a1-ad94-fea213e59a92%2Farchive.mp4?Expires=1395188695&AWSAccessKeyId=AKIAI6LQCPIXYVWCQV6Q&Signature=xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
                        }]
                    }
                    """
                )
            ),
            status=200,
            content_type=u("application/json"),
        )
        archive_list = self.opentok.get_archives(session_id="SESSIONID")
        validate_jwt_header(self, httpretty.last_request().headers[u("x-opentok-auth")])
        expect(httpretty.last_request().headers[u("user-agent")]).to(
            contain(u("OpenTok-Python-SDK/") + __version__)
        )
        expect(httpretty.last_request().headers[u("content-type")]).to(
            equal(u("application/json"))
        )
        # the snake_case session_id kwarg maps to the camelCase sessionId param
        expect(httpretty.last_request()).to(
            have_property(u("querystring"), {u("sessionId"): [u("SESSIONID")]})
        )
        expect(archive_list).to(be_an(ArchiveList))
        expect(archive_list).to(have_property(u("count"), 4))
        expect(list(archive_list.items)).to(have_length(4))
    @httpretty.activate
    def test_find_archives_with_offset_count_sessionId(self):
        """get_archives() with offset, count and session_id forwards all three
        as query parameters (offset, count, sessionId)."""
        httpretty.register_uri(
            httpretty.GET,
            u("https://api.opentok.com/v2/project/{0}/archive").format(self.api_key),
            body=textwrap.dedent(
                u(
                    """\
                    {
                        "count" : 2,
                        "items" : [ {
                            "createdAt" : 1394396753000,
                            "duration" : 24,
                            "id" : "b8f64de1-e218-4091-9544-4cbf369fc238",
                            "name" : "showtime again",
                            "partnerId" : 123456,
                            "reason" : "",
                            "sessionId" : "SESSIONID",
                            "size" : 2227849,
                            "status" : "available",
                            "hasAudio": true,
                            "hasVideo": true,
                            "url" : "http://tokbox.com.archive2.s3.amazonaws.com/123456%2Fb8f64de1-e218-4091-9544-4cbf369fc238%2Farchive.mp4?Expires=1395188695&AWSAccessKeyId=AKIAI6LQCPIXYVWCQV6Q&Signature=xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
                        }, {
                            "createdAt" : 1394321113000,
                            "duration" : 1294,
                            "id" : "832641bf-5dbf-41a1-ad94-fea213e59a92",
                            "name" : "showtime",
                            "partnerId" : 123456,
                            "reason" : "",
                            "sessionId" : "SESSIONID",
                            "size" : 42165242,
                            "status" : "available",
                            "hasAudio": true,
                            "hasVideo": true,
                            "url" : "http://tokbox.com.archive2.s3.amazonaws.com/123456%2F832641bf-5dbf-41a1-ad94-fea213e59a92%2Farchive.mp4?Expires=1395188695&AWSAccessKeyId=AKIAI6LQCPIXYVWCQV6Q&Signature=xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
                        }]
                    }
                    """
                )
            ),
            status=200,
            content_type=u("application/json"),
        )
        archive_list = self.opentok.get_archives(
            offset=2, count=2, session_id="SESSIONID"
        )
        validate_jwt_header(self, httpretty.last_request().headers[u("x-opentok-auth")])
        expect(httpretty.last_request().headers[u("user-agent")]).to(
            contain(u("OpenTok-Python-SDK/") + __version__)
        )
        expect(httpretty.last_request().headers[u("content-type")]).to(
            equal(u("application/json"))
        )
        # all three filters must be present in the query string at once
        expect(httpretty.last_request()).to(
            have_property(
                u("querystring"),
                {
                    u("offset"): [u("2")],
                    u("count"): [u("2")],
                    u("sessionId"): [u("SESSIONID")],
                },
            )
        )
        expect(archive_list).to(be_an(ArchiveList))
        expect(archive_list).to(have_property(u("count"), 2))
        expect(list(archive_list.items)).to(have_length(2))
    @httpretty.activate
    def test_find_archives_alternative_method(self):
        """list_archives() (the alias for get_archives) accepts the same
        offset/count/session_id parameters and forwards them identically."""
        httpretty.register_uri(
            httpretty.GET,
            u("https://api.opentok.com/v2/project/{0}/archive").format(self.api_key),
            body=textwrap.dedent(
                u(
                    """\
                    {
                        "count" : 2,
                        "items" : [ {
                            "createdAt" : 1394396753000,
                            "duration" : 24,
                            "id" : "b8f64de1-e218-4091-9544-4cbf369fc238",
                            "name" : "showtime again",
                            "partnerId" : 123456,
                            "reason" : "",
                            "sessionId" : "SESSIONID",
                            "size" : 2227849,
                            "status" : "available",
                            "hasAudio": true,
                            "hasVideo": true,
                            "url" : "http://tokbox.com.archive2.s3.amazonaws.com/123456%2Fb8f64de1-e218-4091-9544-4cbf369fc238%2Farchive.mp4?Expires=1395188695&AWSAccessKeyId=AKIAI6LQCPIXYVWCQV6Q&Signature=xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
                        }, {
                            "createdAt" : 1394321113000,
                            "duration" : 1294,
                            "id" : "832641bf-5dbf-41a1-ad94-fea213e59a92",
                            "name" : "showtime",
                            "partnerId" : 123456,
                            "reason" : "",
                            "sessionId" : "SESSIONID",
                            "size" : 42165242,
                            "status" : "available",
                            "hasAudio": true,
                            "hasVideo": true,
                            "url" : "http://tokbox.com.archive2.s3.amazonaws.com/123456%2F832641bf-5dbf-41a1-ad94-fea213e59a92%2Farchive.mp4?Expires=1395188695&AWSAccessKeyId=AKIAI6LQCPIXYVWCQV6Q&Signature=xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
                        }]
                    }
                    """
                )
            ),
            status=200,
            content_type=u("application/json"),
        )
        archive_list = self.opentok.list_archives(
            offset=2, count=2, session_id="SESSIONID"
        )
        validate_jwt_header(self, httpretty.last_request().headers[u("x-opentok-auth")])
        expect(httpretty.last_request().headers[u("user-agent")]).to(
            contain(u("OpenTok-Python-SDK/") + __version__)
        )
        expect(httpretty.last_request().headers[u("content-type")]).to(
            equal(u("application/json"))
        )
        # the alias must produce exactly the same query string as get_archives
        expect(httpretty.last_request()).to(
            have_property(
                u("querystring"),
                {
                    u("offset"): [u("2")],
                    u("count"): [u("2")],
                    u("sessionId"): [u("SESSIONID")],
                },
            )
        )
        expect(archive_list).to(be_an(ArchiveList))
        expect(archive_list).to(have_property(u("count"), 2))
        expect(list(archive_list.items)).to(have_length(2))
@httpretty.activate
def test_find_paused_archive(self):
archive_id = u("f6e7ee58-d6cf-4a59-896b-6d56b158ec71")
httpretty.register_uri(
httpretty.GET,
u("https://api.opentok.com/v2/project/{0}/archive/{1}").format(
self.api_key, archive_id
),
body=textwrap.dedent(
u(
"""\
{
"createdAt" : 1395187836000,
"duration" : 62,
"id" : "f6e7ee58-d6cf-4a59-896b-6d56b158ec71",
"name" : "",
"partnerId" : 123456,
"reason" : "",
"sessionId" : "SESSIONID",
"size" : 8347554,
"status" : "paused",
"hasAudio": true,
"hasVideo": true,
"url" : null
}
"""
)
),
status=200,
content_type=u("application/json"),
)
archive = self.opentok.get_archive(archive_id)
expect(archive).to(be_an(Archive))
expect(archive).to(have_property(u("status"), u("paused")))
@httpretty.activate
def test_find_expired_archive(self):
archive_id = u("f6e7ee58-d6cf-4a59-896b-6d56b158ec71")
httpretty.register_uri(
httpretty.GET,
u("https://api.opentok.com/v2/project/{0}/archive/{1}").format(
self.api_key, archive_id
),
body=textwrap.dedent(
u(
"""\
{
"createdAt" : 1395187836000,
"duration" : 62,
"id" : "f6e7ee58-d6cf-4a59-896b-6d56b158ec71",
"name" : "",
"partnerId" : 123456,
"reason" : "",
"sessionId" : "SESSIONID",
"size" : 8347554,
"status" : "expired",
"hasAudio": true,
"hasVideo": true,
"url" : null
}
"""
)
),
status=200,
content_type=u("application/json"),
)
archive = self.opentok.get_archive(archive_id)
expect(archive).to(be_an(Archive))
expect(archive).to(have_property(u("status"), u("expired")))
@httpretty.activate
def test_find_archive_with_unknown_properties(self):
archive_id = u("f6e7ee58-d6cf-4a59-896b-6d56b158ec71")
httpretty.register_uri(
httpretty.GET,
u("https://api.opentok.com/v2/project/{0}/archive/{1}").format(
self.api_key, archive_id
),
body=textwrap.dedent(
u(
"""\
{
"createdAt" : 1395187836000,
"duration" : 62,
"id" : "f6e7ee58-d6cf-4a59-896b-6d56b158ec71",
"name" : "",
"partnerId" : 123456,
"reason" : "",
"sessionId" : "SESSIONID",
"size" : 8347554,
"status" : "expired",
"url" : null,
"hasAudio": true,
"hasVideo": true,
"notarealproperty" : "not a real value"
}
"""
)
),
status=200,
content_type=u("application/json"),
)
archive = self.opentok.get_archive(archive_id)
expect(archive).to(be_an(Archive))
@httpretty.activate
def test_set_archive_layout(self):
""" Test set archive layout functionality """
archive_id = u("f6e7ee58-d6cf-4a59-896b-6d56b158ec71")
httpretty.register_uri(
httpretty.PUT,
u("https://api.opentok.com/v2/project/{0}/archive/{1}/layout").format(
self.api_key, archive_id
),
status=200,
content_type=u("application/json"),
)
self.opentok.set_archive_layout(archive_id, "horizontalPresentation")
validate_jwt_header(self, httpretty.last_request().headers[u("x-opentok-auth")])
expect(httpretty.last_request().headers[u("user-agent")]).to(
contain(u("OpenTok-Python-SDK/") + __version__)
)
expect(httpretty.last_request().headers[u("content-type")]).to(
equal(u("application/json"))
)
@httpretty.activate
def test_set_archive_screenshare_type(self):
""" Test set archive layout functionality """
archive_id = u("f6e7ee58-d6cf-4a59-896b-6d56b158ec71")
httpretty.register_uri(
httpretty.PUT,
u("https://api.opentok.com/v2/project/{0}/archive/{1}/layout").format(
self.api_key, archive_id
),
status=200,
content_type=u("application/json"),
)
self.opentok.set_archive_layout(archive_id, "bestFit", screenshare_type="horizontalPresentation")
validate_jwt_header(self, httpretty.last_request().headers[u("x-opentok-auth")])
expect(httpretty.last_request().headers[u("user-agent")]).to(
contain(u("OpenTok-Python-SDK/") + __version__)
)
expect(httpretty.last_request().headers[u("content-type")]).to(
equal(u("application/json"))
)
if PY2:
body = json.loads(httpretty.last_request().body)
if PY3:
body = json.loads(httpretty.last_request().body.decode("utf-8"))
expect(body).to(have_key(u("type"), u("bestFit")))
expect(body).to_not(have_key(u("stylesheet")))
expect(body).to(have_key(u("screenshareType"), u("horizontalPresentation")))
@httpretty.activate
def test_set_custom_archive_layout(self):
""" Test set a custom archive layout specifying the 'stylesheet' parameter """
archive_id = u("f6e7ee58-d6cf-4a59-896b-6d56b158ec71")
httpretty.register_uri(
httpretty.PUT,
u("https://api.opentok.com/v2/project/{0}/archive/{1}/layout").format(
self.api_key, archive_id
),
status=200,
content_type=u("application/json"),
)
self.opentok.set_archive_layout(
archive_id,
"custom",
"stream.instructor {position: absolute; width: 100%; height:50%;}",
)
validate_jwt_header(self, httpretty.last_request().headers[u("x-opentok-auth")])
expect(httpretty.last_request().headers[u("user-agent")]).to(
contain(u("OpenTok-Python-SDK/") + __version__)
)
expect(httpretty.last_request().headers[u("content-type")]).to(
equal(u("application/json"))
)
@httpretty.activate
def test_start_archive_with_streammode_auto(self):
url = f"https://api.opentok.com/v2/project/{self.api_key}/archive"
httpretty.register_uri(httpretty.POST,
url,
responses=[
httpretty.Response(body=json.dumps({"streamMode":"auto"}),
content_type="application/json",
status=200)
])
response = requests.post(url)
response.status_code.should.equal(200)
response.json().should.equal({"streamMode":"auto"})
response.headers["Content-Type"].should.equal("application/json")
@httpretty.activate
def test_start_archive_with_streammode_manual(self):
url = f"https://api.opentok.com/v2/project/{self.api_key}/archive"
httpretty.register_uri(httpretty.POST,
url,
responses=[
httpretty.Response(body=json.dumps({"streamMode":"manual"}),
content_type="application/json",
status=200)
])
response = requests.post(url)
response.status_code.should.equal(200)
response.json().should.equal({"streamMode":"manual"})
response.headers["Content-Type"].should.equal("application/json")
@httpretty.activate
def test_set_archive_layout_throws_exception(self):
""" Test invalid request in set archive layout """
archive_id = u("f6e7ee58-d6cf-4a59-896b-6d56b158ec71")
httpretty.register_uri(
httpretty.PUT,
u("https://api.opentok.com/v2/project/{0}/archive/{1}/layout").format(
self.api_key, archive_id
),
status=400,
content_type=u("application/json"),
)
self.assertRaises(
ArchiveError,
self.opentok.set_archive_layout,
archive_id,
"horizontalPresentation",
)
| 45.312699
| 241
| 0.477065
| 5,975
| 71,005
| 5.541255
| 0.046192
| 0.025371
| 0.051648
| 0.051648
| 0.958803
| 0.952762
| 0.947235
| 0.94204
| 0.938295
| 0.935546
| 0
| 0.085756
| 0.395479
| 71,005
| 1,566
| 242
| 45.341635
| 0.685584
| 0.01583
| 0
| 0.720137
| 0
| 0.001138
| 0.155118
| 0.019908
| 0
| 0
| 0
| 0.002554
| 0.003413
| 1
| 0.032992
| false
| 0
| 0.01479
| 0
| 0.048919
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
716c036acd077e322fb5bbf9bea7e1d20e3f25dc
| 131,049
|
py
|
Python
|
examples/grids/grid_bpu/milano_8p1_vsg_1/milano_8p1_vsg_1.py
|
pydae/pydae
|
8076bcfeb2cdc865a5fc58561ff8d246d0ed7d9d
|
[
"MIT"
] | 1
|
2020-12-20T03:45:26.000Z
|
2020-12-20T03:45:26.000Z
|
examples/grids/grid_bpu/milano_8p1_vsg_1/milano_8p1_vsg_1.py
|
pydae/pydae
|
8076bcfeb2cdc865a5fc58561ff8d246d0ed7d9d
|
[
"MIT"
] | null | null | null |
examples/grids/grid_bpu/milano_8p1_vsg_1/milano_8p1_vsg_1.py
|
pydae/pydae
|
8076bcfeb2cdc865a5fc58561ff8d246d0ed7d9d
|
[
"MIT"
] | null | null | null |
import numpy as np
import numba
import scipy.optimize as sopt
import json
# Short aliases so the generated model equations below read like the
# symbolic source (sin(x) instead of np.sin(x)).
sin = np.sin
cos = np.cos
atan2 = np.arctan2
sqrt = np.sqrt
sign = np.sign
exp = np.exp
class milano_8p1_vsg_1_class:
    """Auto-generated pydae wrapper for the 'milano_8p1_vsg_1' DAE model.

    Holds solver settings, the parameter/input/variable name and value
    lists, the jacobian sparsity pattern, and a numpy record array
    (``self.struct``) that carries all numeric data into the jitted
    ``ini``/``run`` evaluation functions.
    """
    def __init__(self):
        """Set solver configuration, model dimensions and name/value lists, then build self.struct via update()."""
        # --- solver configuration ---
        self.t_end = 10.000000
        self.Dt = 0.0010000
        self.decimation = 10.000000
        self.itol = 1e-6
        self.Dt_max = 0.001000
        self.Dt_min = 0.001000
        self.solvern = 5
        self.imax = 100
        # --- model dimensions: states, algebraic vars, outputs ---
        self.N_x = 27
        self.N_y = 31
        self.N_z = 7
        self.N_store = 10000
        # --- generated parameter / input / variable catalogs ---
        self.params_list = ['S_base', 'g_1_2', 'b_1_2', 'bs_1_2', 'g_2_3', 'b_2_3', 'bs_2_3', 'U_1_n', 'U_2_n', 'U_3_n', 'S_n_2', 'H_2', 'Omega_b_2', 'T1d0_2', 'T1q0_2', 'X_d_2', 'X_q_2', 'X1d_2', 'X1q_2', 'D_2', 'R_a_2', 'K_delta_2', 'K_a_2', 'K_ai_2', 'T_r_2', 'Droop_2', 'T_gov_1_2', 'T_gov_2_2', 'T_gov_3_2', 'K_imw_2', 'omega_ref_2', 'S_n_3', 'H_3', 'Omega_b_3', 'T1d0_3', 'T1q0_3', 'X_d_3', 'X_q_3', 'X1d_3', 'X1q_3', 'D_3', 'R_a_3', 'K_delta_3', 'K_a_3', 'K_ai_3', 'T_r_3', 'Droop_3', 'T_gov_1_3', 'T_gov_2_3', 'T_gov_3_3', 'K_imw_3', 'omega_ref_3', 'K_sec_2', 'K_sec_3', 'S_n_1', 'R_s_1', 'H_1', 'Omega_b_1', 'R_v_1', 'X_v_1', 'D1_1', 'D2_1', 'D3_1', 'K_delta_1', 'T_wo_1', 'T_i_1', 'K_q_1', 'T_q_1', 'H_s_1', 'K_p_soc_1', 'K_i_soc_1']
        self.params_values_list = [100000000.0, 0.07486570963334518, -0.7486570963334518, 8.4e-05, 7.486570963334518, -74.86570963334518, 8.4e-07, 20000.0, 20000.0, 20000.0, 900000000.0, 5.0, 314.1592653589793, 8.0, 0.4, 1.8, 1.7, 0.3, 0.55, 1.0, 0.01, 0.01, 100, 1e-06, 0.02, 0.05, 1.0, 2.0, 10.0, 0.0, 1.0, 900000000.0, 5.0, 314.1592653589793, 8.0, 0.4, 1.8, 1.7, 0.3, 0.55, 1.0, 0.0025, 0.01, 100, 1e-06, 0.02, 0.05, 1.0, 2.0, 10.0, 0.0, 1.0, 0.01, 0.01, 1000000.0, 0.01, 5.0, 314.1592653589793, 0.01, 0.1, 1.0, 0.0, 0.0, 0.01, 10.0, 0.01, 0.1, 0.1, 100.0, 1.0, 0.01]
        self.inputs_ini_list = ['P_1', 'Q_1', 'P_2', 'Q_2', 'P_3', 'Q_3', 'v_ref_2', 'v_pss_2', 'p_c_2', 'v_ref_3', 'v_pss_3', 'p_c_3', 'p_in_1', 'Dp_ref_1', 'q_ref_1', 'p_src_1', 'soc_ref_1']
        self.inputs_ini_values_list = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.7793, 1.0, 0.0, 0.7793, 0.0, 0.0, 0.0, 0.8, 0.5]
        self.inputs_run_list = ['P_1', 'Q_1', 'P_2', 'Q_2', 'P_3', 'Q_3', 'v_ref_2', 'v_pss_2', 'p_c_2', 'v_ref_3', 'v_pss_3', 'p_c_3', 'p_in_1', 'Dp_ref_1', 'q_ref_1', 'p_src_1', 'soc_ref_1']
        self.inputs_run_values_list = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.7793, 1.0, 0.0, 0.7793, 0.0, 0.0, 0.0, 0.8, 0.5]
        self.outputs_list = ['V_1', 'V_2', 'V_3', 'p_e_2', 'p_e_3', 'p_t_1', 'p_soc_1']
        self.x_list = ['delta_2', 'omega_2', 'e1q_2', 'e1d_2', 'v_c_2', 'xi_v_2', 'x_gov_1_2', 'x_gov_2_2', 'xi_imw_2', 'delta_3', 'omega_3', 'e1q_3', 'e1d_3', 'v_c_3', 'xi_v_3', 'x_gov_1_3', 'x_gov_2_3', 'xi_imw_3', 'xi_freq', 'delta_1', 'omega_v_1', 'x_wo_1', 'i_d_1', 'i_q_1', 'xi_q_1', 'soc_1', 'xi_soc_1']
        self.y_run_list = ['V_1', 'theta_1', 'V_2', 'theta_2', 'V_3', 'theta_3', 'i_d_2', 'i_q_2', 'p_g_2_1', 'q_g_2_1', 'v_f_2', 'p_m_ref_2', 'p_m_2', 'i_d_3', 'i_q_3', 'p_g_3_1', 'q_g_3_1', 'v_f_3', 'p_m_ref_3', 'p_m_3', 'p_r_2', 'p_r_3', 'i_d_ref_1', 'i_q_ref_1', 'p_g_1_1', 'q_g_1_1', 'p_d2_1', 'e_v_1', 'p_sto_1', 'p_m_1', 'omega_coi']
        self.xy_list = self.x_list + self.y_run_list
        self.y_ini_list = ['V_1', 'theta_1', 'V_2', 'theta_2', 'V_3', 'theta_3', 'i_d_2', 'i_q_2', 'p_g_2_1', 'q_g_2_1', 'v_f_2', 'p_m_ref_2', 'p_m_2', 'i_d_3', 'i_q_3', 'p_g_3_1', 'q_g_3_1', 'v_f_3', 'p_m_ref_3', 'p_m_3', 'p_r_2', 'p_r_3', 'i_d_ref_1', 'i_q_ref_1', 'p_g_1_1', 'q_g_1_1', 'p_d2_1', 'e_v_1', 'p_sto_1', 'p_m_1', 'omega_coi']
        self.xy_ini_list = self.x_list + self.y_ini_list
        # --- simulation bookkeeping ---
        self.t = 0.0
        self.it = 0
        self.it_store = 0
        self.xy_prev = np.zeros((self.N_x+self.N_y,1))
        self.initialization_tol = 1e-6
        self.N_u = len(self.inputs_run_list)
        self.sopt_root_method='hybr'
        self.sopt_root_jac=True
        self.u_ini_list = self.inputs_ini_list
        self.u_ini_values_list = self.inputs_ini_values_list
        self.u_run_list = self.inputs_run_list
        self.u_run_values_list = self.inputs_run_values_list
        self.N_u = len(self.u_run_list)
        # Jacobian sparsity pattern from the generated nonzeros() helper.
        Fx_ini_rows,Fx_ini_cols,Fy_ini_rows,Fy_ini_cols,Gx_ini_rows,Gx_ini_cols,Gy_ini_rows,Gy_ini_cols = nonzeros()
        self.Fx_ini_rows = np.array(Fx_ini_rows)
        # NOTE(review): this single-element special case reshapes and is then
        # immediately overwritten for Fx_ini_cols on the next line — looks like
        # a quirk of the code generator; confirm before changing.
        if len(Fx_ini_rows) == 1:
            self.Fx_ini_rows = np.array([[Fx_ini_rows]]).reshape(1,)
            self.Fx_ini_cols = np.array([[Fx_ini_cols]]).reshape(1,)
        self.Fx_ini_cols = np.array(Fx_ini_cols)
        self.Fy_ini_rows = np.array(Fy_ini_rows)
        self.Fy_ini_cols = np.array(Fy_ini_cols)
        self.Gx_ini_rows = np.array(Gx_ini_rows)
        self.Gx_ini_cols = np.array(Gx_ini_cols)
        self.Gy_ini_rows = np.array(Gy_ini_rows)
        self.Gy_ini_cols = np.array(Gy_ini_cols)
        # Names shared between the ini and run systems (copied in initialize()/ss()).
        self.yini2urun = list(set(self.inputs_run_list).intersection(set(self.y_ini_list)))
        self.uini2yrun = list(set(self.y_run_list).intersection(set(self.inputs_ini_list)))
        self.update()
    def update(self):
        """Rebuild the record array ``self.struct`` from the current dimensions,
        parameter list and input lists, then warm up the non-jitted jacobian
        evaluators at xy = 0."""
        self.N_steps = int(np.ceil(self.t_end/self.Dt))
        # dtype of the single-record struct passed to the jitted functions.
        dt = [
              ('t_end', np.float64),
              ('Dt', np.float64),
              ('decimation', np.float64),
              ('itol', np.float64),
              ('Dt_max', np.float64),
              ('Dt_min', np.float64),
              ('solvern', np.int64),
              ('imax', np.int64),
              ('N_steps', np.int64),
              ('N_store', np.int64),
              ('N_x', np.int64),
              ('N_y', np.int64),
              ('N_z', np.int64),
              ('t', np.float64),
              ('it', np.int64),
              ('it_store', np.int64),
              ('idx', np.int64),
              ('idy', np.int64),
              ('f', np.float64, (self.N_x,1)),
              ('x', np.float64, (self.N_x,1)),
              ('x_0', np.float64, (self.N_x,1)),
              ('g', np.float64, (self.N_y,1)),
              ('y_run', np.float64, (self.N_y,1)),
              ('y_ini', np.float64, (self.N_y,1)),
              ('u_run', np.float64, (self.N_u,1)),
              ('y_0', np.float64, (self.N_y,1)),
              ('h', np.float64, (self.N_z,1)),
              ('Fx', np.float64, (self.N_x,self.N_x)),
              ('Fy', np.float64, (self.N_x,self.N_y)),
              ('Gx', np.float64, (self.N_y,self.N_x)),
              ('Gy', np.float64, (self.N_y,self.N_y)),
              ('Fu', np.float64, (self.N_x,self.N_u)),
              ('Gu', np.float64, (self.N_y,self.N_u)),
              ('Hx', np.float64, (self.N_z,self.N_x)),
              ('Hy', np.float64, (self.N_z,self.N_y)),
              ('Hu', np.float64, (self.N_z,self.N_u)),
              ('Fx_ini', np.float64, (self.N_x,self.N_x)),
              ('Fy_ini', np.float64, (self.N_x,self.N_y)),
              ('Gx_ini', np.float64, (self.N_y,self.N_x)),
              ('Gy_ini', np.float64, (self.N_y,self.N_y)),
              ('T', np.float64, (self.N_store+1,1)),
              ('X', np.float64, (self.N_store+1,self.N_x)),
              ('Y', np.float64, (self.N_store+1,self.N_y)),
              ('Z', np.float64, (self.N_store+1,self.N_z)),
              ('iters', np.float64, (self.N_store+1,1)),
              ('store', np.int64),
              ('Fx_ini_rows', np.int64, self.Fx_ini_rows.shape),
              ('Fx_ini_cols', np.int64, self.Fx_ini_cols.shape),
              ('Fy_ini_rows', np.int64, self.Fy_ini_rows.shape),
              ('Fy_ini_cols', np.int64, self.Fy_ini_cols.shape),
              ('Gx_ini_rows', np.int64, self.Gx_ini_rows.shape),
              ('Gx_ini_cols', np.int64, self.Gx_ini_cols.shape),
              ('Gy_ini_rows', np.int64, self.Gy_ini_rows.shape),
              ('Gy_ini_cols', np.int64, self.Gy_ini_cols.shape),
              ('Ac_ini', np.float64, ((self.N_x+self.N_y,self.N_x+self.N_y))),
              ('fg', np.float64, ((self.N_x+self.N_y,1))),
             ]
        # Initial values, in the same order as the dtype entries above.
        values = [
                self.t_end,
                self.Dt,
                self.decimation,
                self.itol,
                self.Dt_max,
                self.Dt_min,
                self.solvern,
                self.imax,
                self.N_steps,
                self.N_store,
                self.N_x,
                self.N_y,
                self.N_z,
                self.t,
                self.it,
                self.it_store,
                0,                                     # idx
                0,                                     # idy
                np.zeros((self.N_x,1)),                # f
                np.zeros((self.N_x,1)),                # x
                np.zeros((self.N_x,1)),                # x_0
                np.zeros((self.N_y,1)),                # g
                np.zeros((self.N_y,1)),                # y_run
                np.zeros((self.N_y,1)),                # y_ini
                np.zeros((self.N_u,1)),                # u_run
                np.zeros((self.N_y,1)),                # y_0
                np.zeros((self.N_z,1)),                # h
                np.zeros((self.N_x,self.N_x)),         # Fx
                np.zeros((self.N_x,self.N_y)),         # Fy
                np.zeros((self.N_y,self.N_x)),         # Gx
                np.zeros((self.N_y,self.N_y)),         # Gy
                np.zeros((self.N_x,self.N_u)),         # Fu
                np.zeros((self.N_y,self.N_u)),         # Gu
                np.zeros((self.N_z,self.N_x)),         # Hx
                np.zeros((self.N_z,self.N_y)),         # Hy
                np.zeros((self.N_z,self.N_u)),         # Hu
                np.zeros((self.N_x,self.N_x)),         # Fx_ini
                np.zeros((self.N_x,self.N_y)),         # Fy_ini
                np.zeros((self.N_y,self.N_x)),         # Gx_ini
                np.zeros((self.N_y,self.N_y)),         # Gy_ini
                np.zeros((self.N_store+1,1)),          # T
                np.zeros((self.N_store+1,self.N_x)),   # X
                np.zeros((self.N_store+1,self.N_y)),   # Y
                np.zeros((self.N_store+1,self.N_z)),   # Z
                np.zeros((self.N_store+1,1)),          # iters
                1,                                     # store
                self.Fx_ini_rows,
                self.Fx_ini_cols,
                self.Fy_ini_rows,
                self.Fy_ini_cols,
                self.Gx_ini_rows,
                self.Gx_ini_cols,
                self.Gy_ini_rows,
                self.Gy_ini_cols,
                np.zeros((self.N_x+self.N_y,self.N_x+self.N_y)),
                np.zeros((self.N_x+self.N_y,1)),
                ]
        # Every parameter and input becomes its own scalar field in the struct.
        dt += [(item,np.float64) for item in self.params_list]
        values += [item for item in self.params_values_list]
        for item_id,item_val in zip(self.inputs_ini_list,self.inputs_ini_values_list):
            if item_id in self.inputs_run_list: continue
            dt += [(item_id,np.float64)]
            values += [item_val]
        dt += [(item,np.float64) for item in self.inputs_run_list]
        values += [item for item in self.inputs_run_values_list]
        self.struct = np.rec.array([tuple(values)], dtype=np.dtype(dt))
        # Warm up the pure-numpy jacobian evaluators at the origin.
        xy0 = np.zeros((self.N_x+self.N_y,))
        self.ini_dae_jacobian_nn(xy0)
        self.run_dae_jacobian_nn(xy0)
def load_params(self,data_input):
if type(data_input) == str:
json_file = data_input
self.json_file = json_file
self.json_data = open(json_file).read().replace("'",'"')
data = json.loads(self.json_data)
elif type(data_input) == dict:
data = data_input
self.data = data
for item in self.data:
self.struct[0][item] = self.data[item]
if item in self.params_list:
self.params_values_list[self.params_list.index(item)] = self.data[item]
elif item in self.inputs_ini_list:
self.inputs_ini_values_list[self.inputs_ini_list.index(item)] = self.data[item]
elif item in self.inputs_run_list:
self.inputs_run_values_list[self.inputs_run_list.index(item)] = self.data[item]
else:
print(f'parameter or input {item} not found')
    def ini_problem(self,x):
        """Residual of the initialization problem at xy-vector ``x``.

        x: flat array of length N_x+N_y, [states | algebraic (y_ini)].
        Returns the stacked residual (f; g) as a 1-D array for the root solver.
        """
        self.struct[0].x[:,0] = x[0:self.N_x]
        self.struct[0].y_ini[:,0] = x[self.N_x:(self.N_x+self.N_y)]
        if self.compile:
            # jitted evaluation; modes 2 and 3 presumably fill f and g — TODO confirm against ini()
            ini(self.struct,2)
            ini(self.struct,3)
        else:
            # pure-python fallback of the numba-compiled function
            ini.py_func(self.struct,2)
            ini.py_func(self.struct,3)
        fg = np.vstack((self.struct[0].f,self.struct[0].g))[:,0]
        return fg
    def run_problem(self,x):
        """Residual of the run-time problem at xy-vector ``x`` (also refreshes jacobians).

        x: flat array of length N_x+N_y, [states | algebraic (y_run)].
        Returns the stacked residual (f; g) as a 1-D array.
        """
        t = self.struct[0].t
        self.struct[0].x[:,0] = x[0:self.N_x]
        self.struct[0].y_run[:,0] = x[self.N_x:(self.N_x+self.N_y)]
        if self.compile:
            # modes 2-3 evaluate f and g; 10-13 the four jacobian blocks — TODO confirm against run()
            run(t,self.struct,2)
            run(t,self.struct,3)
            run(t,self.struct,10)
            run(t,self.struct,11)
            run(t,self.struct,12)
            run(t,self.struct,13)
        else:
            run.py_func(t,self.struct,2)
            run.py_func(t,self.struct,3)
            run.py_func(t,self.struct,10)
            run.py_func(t,self.struct,11)
            run.py_func(t,self.struct,12)
            run.py_func(t,self.struct,13)
        fg = np.vstack((self.struct[0].f,self.struct[0].g))[:,0]
        return fg
    def run_dae_jacobian(self,x):
        """Assemble the full run-mode jacobian [[Fx,Fy],[Gx,Gy]] at ``x`` (evaluated at t=0.0)."""
        self.struct[0].x[:,0] = x[0:self.N_x]
        self.struct[0].y_run[:,0] = x[self.N_x:(self.N_x+self.N_y)]
        run(0.0,self.struct,10)
        run(0.0,self.struct,11)
        run(0.0,self.struct,12)
        run(0.0,self.struct,13)
        A_c = np.block([[self.struct[0].Fx,self.struct[0].Fy],
                        [self.struct[0].Gx,self.struct[0].Gy]])
        return A_c
    def run_dae_jacobian_nn(self,x):
        """Evaluate the non-jitted ('_nn') run-mode jacobians at ``x``.

        Unlike run_dae_jacobian() this returns nothing; it only updates the
        struct and is used by update() to warm up the evaluators.
        """
        self.struct[0].x[:,0] = x[0:self.N_x]
        self.struct[0].y_run[:,0] = x[self.N_x:(self.N_x+self.N_y)]
        run_nn(0.0,self.struct,10)
        run_nn(0.0,self.struct,11)
        run_nn(0.0,self.struct,12)
        run_nn(0.0,self.struct,13)
    def eval_jacobians(self):
        """Re-evaluate run-mode jacobians at the current struct state (t=0.0).

        NOTE(review): only modes 10-12 are evaluated here while other call
        sites use 10-13 — confirm the omission of mode 13 is intentional.
        """
        run(0.0,self.struct,10)
        run(0.0,self.struct,11)
        run(0.0,self.struct,12)
        return 1
    def ini_dae_jacobian(self,x):
        """Assemble the initialization jacobian [[Fx_ini,Fy_ini],[Gx_ini,Gy_ini]] at ``x``."""
        self.struct[0].x[:,0] = x[0:self.N_x]
        self.struct[0].y_ini[:,0] = x[self.N_x:(self.N_x+self.N_y)]
        if self.compile:
            ini(self.struct,10)
            ini(self.struct,11)
        else:
            # pure-python fallback of the jitted function
            ini.py_func(self.struct,10)
            ini.py_func(self.struct,11)
        A_c = np.block([[self.struct[0].Fx_ini,self.struct[0].Fy_ini],
                        [self.struct[0].Gx_ini,self.struct[0].Gy_ini]])
        return A_c
    def ini_dae_jacobian_nn(self,x):
        """Evaluate the non-jitted ('_nn') initialization jacobians at ``x`` (no return value)."""
        self.struct[0].x[:,0] = x[0:self.N_x]
        self.struct[0].y_ini[:,0] = x[self.N_x:(self.N_x+self.N_y)]
        ini_nn(self.struct,10)
        ini_nn(self.struct,11)
    def f_ode(self,x):
        """Return f(x) for use as a plain ODE right-hand side.

        NOTE(review): ``run`` is called here with two arguments while other
        call sites pass (t, struct, mode) — confirm run()'s signature before use.
        """
        self.struct[0].x[:,0] = x
        run(self.struct,1)
        return self.struct[0].f[:,0]

    def f_odeint(self,x,t):
        """scipy.integrate.odeint-compatible wrapper: rhs(x, t)."""
        self.struct[0].x[:,0] = x
        run(self.struct,1)
        return self.struct[0].f[:,0]

    def f_ivp(self,t,x):
        """scipy.integrate.solve_ivp-compatible wrapper: rhs(t, x)."""
        self.struct[0].x[:,0] = x
        run(self.struct,1)
        return self.struct[0].f[:,0]

    def Fx_ode(self,x):
        """Return the state jacobian Fx evaluated at ``x`` (mode 10)."""
        self.struct[0].x[:,0] = x
        run(self.struct,10)
        return self.struct[0].Fx
def eval_A(self):
Fx = self.struct[0].Fx
Fy = self.struct[0].Fy
Gx = self.struct[0].Gx
Gy = self.struct[0].Gy
A = Fx - Fy @ np.linalg.solve(Gy,Gx)
self.A = A
return A
def eval_A_ini(self):
Fx = self.struct[0].Fx_ini
Fy = self.struct[0].Fy_ini
Gx = self.struct[0].Gx_ini
Gy = self.struct[0].Gy_ini
A = Fx - Fy @ np.linalg.solve(Gy,Gx)
return A
def reset(self):
for param,param_value in zip(self.params_list,self.params_values_list):
self.struct[0][param] = param_value
for input_name,input_value in zip(self.inputs_ini_list,self.inputs_ini_values_list):
self.struct[0][input_name] = input_value
for input_name,input_value in zip(self.inputs_run_list,self.inputs_run_values_list):
self.struct[0][input_name] = input_value
    def simulate(self,events,xy0=0):
        """Initialize the model, apply each event in sequence, and return (T, X, Y, Z)."""
        # initialize both the ini and the run system
        self.initialize(events,xy0=xy0)
        # simulation run
        for event in events:
            # make all the desired changes
            self.run([event])
        # post process
        T,X,Y,Z = self.post()
        return T,X,Y,Z
    def run(self,events):
        """Apply each event's changes to the struct and advance the solver to the next event."""
        # simulation run
        for event in events:
            # make all the desired changes
            for item in event:
                self.struct[0][item] = event[item]
            daesolver(self.struct)    # run until next event
        return 1
    def rtrun(self,events):
        """Real-time variant of run(): pins it_store to the last slot so only the latest step is kept."""
        # simulation run
        for event in events:
            # make all the desired changes
            for item in event:
                self.struct[0][item] = event[item]
            self.struct[0].it_store = self.struct[0].N_store-1
            daesolver(self.struct)    # run until next event
        return 1
def post(self):
# post process result
T = self.struct[0]['T'][:self.struct[0].it_store]
X = self.struct[0]['X'][:self.struct[0].it_store,:]
Y = self.struct[0]['Y'][:self.struct[0].it_store,:]
Z = self.struct[0]['Z'][:self.struct[0].it_store,:]
iters = self.struct[0]['iters'][:self.struct[0].it_store,:]
self.T = T
self.X = X
self.Y = Y
self.Z = Z
self.iters = iters
return T,X,Y,Z
def save_0(self,file_name = 'xy_0.json'):
xy_0_dict = {}
for item in self.x_list:
xy_0_dict.update({item:self.get_value(item)})
for item in self.y_ini_list:
xy_0_dict.update({item:self.get_value(item)})
xy_0_str = json.dumps(xy_0_dict, indent=4)
with open(file_name,'w') as fobj:
fobj.write(xy_0_str)
def load_0(self,file_name = 'xy_0.json'):
with open(file_name) as fobj:
xy_0_str = fobj.read()
xy_0_dict = json.loads(xy_0_str)
for item in xy_0_dict:
if item in self.x_list:
self.xy_prev[self.x_list.index(item)] = xy_0_dict[item]
if item in self.y_ini_list:
self.xy_prev[self.y_ini_list.index(item)+self.N_x] = xy_0_dict[item]
    def initialize(self,events=[{}],xy0=0,compile=True):
        '''
        Solve the initialization problem and seed the run system.

        Parameters
        ----------
        events : list of dict
            The first event may carry 't_end' and any inputs/parameters
            that need to be changed before initialization.
        xy0 : float, str or dict, optional
            0 means all states start from zero as initial guess;
            1 means all ones; another number scales a vector of ones.
            'prev' reuses the last known initialization result; any other
            string is taken as a JSON file path; a dict is written to
            'xy_0.json' and then loaded.
        compile : bool, optional
            Use the numba-compiled ini/run functions (True) or their
            pure-python .py_func fallbacks (False).

        Returns
        -------
        bool
            self.initialization_ok — whether the root solver converged.
        '''
        self.compile = compile
        # simulation parameters
        self.struct[0].it = 0       # set time step to zero
        self.struct[0].it_store = 0 # set storage to zero
        self.struct[0].t = 0.0      # set time to zero
        # initialization: apply the first event before solving
        it_event = 0
        event = events[it_event]
        for item in event:
            self.struct[0][item] = event[item]
        ## compute initial conditions using x and y_ini
        if type(xy0) == str:
            if xy0 == 'prev':
                xy0 = self.xy_prev
            else:
                self.load_0(xy0)
                xy0 = self.xy_prev
        elif type(xy0) == dict:
            with open('xy_0.json','w') as fobj:
                fobj.write(json.dumps(xy0))
            self.load_0('xy_0.json')
            xy0 = self.xy_prev
        else:
            if xy0 == 0:
                xy0 = np.zeros(self.N_x+self.N_y)
            elif xy0 == 1:
                xy0 = np.ones(self.N_x+self.N_y)
            else:
                xy0 = xy0*np.ones(self.N_x+self.N_y)
        #xy = sopt.fsolve(self.ini_problem,xy0, jac=self.ini_dae_jacobian )
        if self.sopt_root_jac:
            sol = sopt.root(self.ini_problem, xy0,
                            jac=self.ini_dae_jacobian,
                            method=self.sopt_root_method, tol=self.initialization_tol)
        else:
            sol = sopt.root(self.ini_problem, xy0, method=self.sopt_root_method)
        self.initialization_ok = True
        if sol.success == False:
            print('initialization not found!')
            self.initialization_ok = False
            T = self.struct[0]['T'][:self.struct[0].it_store]
            X = self.struct[0]['X'][:self.struct[0].it_store,:]
            Y = self.struct[0]['Y'][:self.struct[0].it_store,:]
            Z = self.struct[0]['Z'][:self.struct[0].it_store,:]
            iters = self.struct[0]['iters'][:self.struct[0].it_store,:]
        if self.initialization_ok:
            xy = sol.x
            # remember the converged point for xy0='prev' next time
            self.xy_prev = xy
            self.struct[0].x[:,0] = xy[0:self.N_x]
            self.struct[0].y_run[:,0] = xy[self.N_x:]
            ## y_ini to u_run: algebraic values that are run inputs
            for item in self.inputs_run_list:
                if item in self.y_ini_list:
                    self.struct[0][item] = self.struct[0].y_ini[self.y_ini_list.index(item)]
            ## u_ini to y_run: ini inputs that are run algebraic variables
            for item in self.inputs_ini_list:
                if item in self.y_run_list:
                    self.struct[0].y_run[self.y_run_list.index(item)] = self.struct[0][item]
            #xy = sopt.fsolve(self.ini_problem,xy0, jac=self.ini_dae_jacobian )
            # NOTE(review): the run system is re-solved from xy0, not from the
            # converged xy — presumably intentional in the generator; verify.
            if self.sopt_root_jac:
                sol = sopt.root(self.run_problem, xy0,
                                jac=self.run_dae_jacobian,
                                method=self.sopt_root_method, tol=self.initialization_tol)
            else:
                sol = sopt.root(self.run_problem, xy0, method=self.sopt_root_method)
            if self.compile:
                # evaluate f and g
                run(0.0,self.struct,2)
                run(0.0,self.struct,3)
                # evaluate run jacobians (modes 10-12 and 14 — TODO confirm mode meanings)
                run(0.0,self.struct,10)
                run(0.0,self.struct,11)
                run(0.0,self.struct,12)
                run(0.0,self.struct,14)
            else:
                # evaluate f and g
                run.py_func(0.0,self.struct,2)
                run.py_func(0.0,self.struct,3)
                # evaluate run jacobians
                run.py_func(0.0,self.struct,10)
                run.py_func(0.0,self.struct,11)
                run.py_func(0.0,self.struct,12)
                run.py_func(0.0,self.struct,14)
        # post process result
        T = self.struct[0]['T'][:self.struct[0].it_store]
        X = self.struct[0]['X'][:self.struct[0].it_store,:]
        Y = self.struct[0]['Y'][:self.struct[0].it_store,:]
        Z = self.struct[0]['Z'][:self.struct[0].it_store,:]
        iters = self.struct[0]['iters'][:self.struct[0].it_store,:]
        self.T = T
        self.X = X
        self.Y = Y
        self.Z = Z
        self.iters = iters
        return self.initialization_ok
def get_value(self,name):
if name in self.inputs_run_list:
value = self.struct[0][name]
if name in self.x_list:
idx = self.x_list.index(name)
value = self.struct[0].x[idx,0]
if name in self.y_run_list:
idy = self.y_run_list.index(name)
value = self.struct[0].y_run[idy,0]
if name in self.params_list:
value = self.struct[0][name]
if name in self.outputs_list:
value = self.struct[0].h[self.outputs_list.index(name),0]
return value
    def get_values(self,name):
        """Return the stored trajectory (one column of X/Y/Z) for a state,
        algebraic variable or output; later matches take precedence.
        An unknown name raises UnboundLocalError."""
        if name in self.x_list:
            values = self.X[:,self.x_list.index(name)]
        if name in self.y_run_list:
            values = self.Y[:,self.y_run_list.index(name)]
        if name in self.outputs_list:
            values = self.Z[:,self.outputs_list.index(name)]
        return values
def get_mvalue(self,names):
'''
Parameters
----------
names : list
list of variables names to return each value.
Returns
-------
mvalue : TYPE
list of value of each variable.
'''
mvalue = []
for name in names:
mvalue += [self.get_value(name)]
return mvalue
def set_value(self,name_,value):
if name_ in self.inputs_run_list:
self.struct[0][name_] = value
return
elif name_ in self.params_list:
self.struct[0][name_] = value
return
elif name_ in self.inputs_ini_list:
self.struct[0][name_] = value
return
else:
print(f'Input or parameter {name_} not found.')
def set_values(self,dictionary):
for item in dictionary:
self.set_value(item,dictionary[item])
    def report_x(self,value_format='5.2f', decimals=2):
        """Print each state variable with its current value.

        NOTE(review): value_format is unused in all report_* methods; the
        format is built from `decimals` only — confirm before relying on it.
        """
        for item in self.x_list:
            print(f'{item:5s} = {self.get_value(item):5.{decimals}f}')

    def report_y(self,value_format='5.2f', decimals=2):
        """Print each run-mode algebraic variable with its current value."""
        for item in self.y_run_list:
            print(f'{item:5s} = {self.get_value(item):5.{decimals}f}')

    def report_u(self,value_format='5.2f', decimals=2):
        """Print each run-mode input with its current value."""
        for item in self.inputs_run_list:
            print(f'{item:5s} = {self.get_value(item):5.{decimals}f}')

    def report_z(self,value_format='5.2f', decimals=2):
        """Print each output with its current value."""
        for item in self.outputs_list:
            print(f'{item:5s} = {self.get_value(item):5.{decimals}f}')

    def report_params(self,value_format='5.2f', decimals=2):
        """Print each parameter with its current value."""
        for item in self.params_list:
            print(f'{item:5s} = {self.get_value(item):5.{decimals}f}')
    def get_x(self):
        """Return the state column vector (N_x, 1) straight from the struct (not a copy)."""
        return self.struct[0].x
    def ss(self):
        """Compute a steady state via the external ssate() helper seeded from
        xy_prev, then copy the ini solution into the run-side values."""
        ssate(self.struct,self.xy_prev.reshape(len(self.xy_prev),1))
        ## y_ini to y_run
        self.struct[0].y_run = self.struct[0].y_ini
        ## y_ini to u_run: algebraic values that double as run inputs
        for item in self.yini2urun:
            self.struct[0][item] = self.struct[0].y_ini[self.y_ini_list.index(item)]
        ## u_ini to y_run: ini inputs that double as run algebraic variables
        for item in self.uini2yrun:
            self.struct[0].y_run[self.y_run_list.index(item)] = self.struct[0][item]
@numba.njit(cache=True)
def ini(struct,mode):
    '''Evaluate the model equations and Jacobians for initialization.

    Auto-generated numba kernel. Operates in-place on ``struct[0]`` using the
    initialization algebraic vector ``y_ini``. Which arrays are written
    depends on ``mode``:

    * ``mode == 2``  -> differential equations ``f``
    * ``mode == 3``  -> algebraic residuals ``g`` and outputs ``h``
    * ``mode == 10`` -> Jacobian ``Fx_ini`` (df/dx)
    * ``mode == 11`` -> Jacobians ``Fy_ini`` (df/dy), ``Gx_ini`` (dg/dx) and
      ``Gy_ini`` (dg/dy)

    Only nonzero/state-dependent Jacobian entries are assigned; constant
    entries are presumably filled elsewhere by the generator — TODO confirm.
    '''
    # Parameters:
    S_base = struct[0].S_base
    g_1_2 = struct[0].g_1_2
    b_1_2 = struct[0].b_1_2
    bs_1_2 = struct[0].bs_1_2
    g_2_3 = struct[0].g_2_3
    b_2_3 = struct[0].b_2_3
    bs_2_3 = struct[0].bs_2_3
    U_1_n = struct[0].U_1_n
    U_2_n = struct[0].U_2_n
    U_3_n = struct[0].U_3_n
    S_n_2 = struct[0].S_n_2
    H_2 = struct[0].H_2
    Omega_b_2 = struct[0].Omega_b_2
    T1d0_2 = struct[0].T1d0_2
    T1q0_2 = struct[0].T1q0_2
    X_d_2 = struct[0].X_d_2
    X_q_2 = struct[0].X_q_2
    X1d_2 = struct[0].X1d_2
    X1q_2 = struct[0].X1q_2
    D_2 = struct[0].D_2
    R_a_2 = struct[0].R_a_2
    K_delta_2 = struct[0].K_delta_2
    K_a_2 = struct[0].K_a_2
    K_ai_2 = struct[0].K_ai_2
    T_r_2 = struct[0].T_r_2
    Droop_2 = struct[0].Droop_2
    T_gov_1_2 = struct[0].T_gov_1_2
    T_gov_2_2 = struct[0].T_gov_2_2
    T_gov_3_2 = struct[0].T_gov_3_2
    K_imw_2 = struct[0].K_imw_2
    omega_ref_2 = struct[0].omega_ref_2
    S_n_3 = struct[0].S_n_3
    H_3 = struct[0].H_3
    Omega_b_3 = struct[0].Omega_b_3
    T1d0_3 = struct[0].T1d0_3
    T1q0_3 = struct[0].T1q0_3
    X_d_3 = struct[0].X_d_3
    X_q_3 = struct[0].X_q_3
    X1d_3 = struct[0].X1d_3
    X1q_3 = struct[0].X1q_3
    D_3 = struct[0].D_3
    R_a_3 = struct[0].R_a_3
    K_delta_3 = struct[0].K_delta_3
    K_a_3 = struct[0].K_a_3
    K_ai_3 = struct[0].K_ai_3
    T_r_3 = struct[0].T_r_3
    Droop_3 = struct[0].Droop_3
    T_gov_1_3 = struct[0].T_gov_1_3
    T_gov_2_3 = struct[0].T_gov_2_3
    T_gov_3_3 = struct[0].T_gov_3_3
    K_imw_3 = struct[0].K_imw_3
    omega_ref_3 = struct[0].omega_ref_3
    K_sec_2 = struct[0].K_sec_2
    K_sec_3 = struct[0].K_sec_3
    S_n_1 = struct[0].S_n_1
    R_s_1 = struct[0].R_s_1
    H_1 = struct[0].H_1
    Omega_b_1 = struct[0].Omega_b_1
    R_v_1 = struct[0].R_v_1
    X_v_1 = struct[0].X_v_1
    D1_1 = struct[0].D1_1
    D2_1 = struct[0].D2_1
    D3_1 = struct[0].D3_1
    K_delta_1 = struct[0].K_delta_1
    T_wo_1 = struct[0].T_wo_1
    T_i_1 = struct[0].T_i_1
    K_q_1 = struct[0].K_q_1
    T_q_1 = struct[0].T_q_1
    H_s_1 = struct[0].H_s_1
    K_p_soc_1 = struct[0].K_p_soc_1
    K_i_soc_1 = struct[0].K_i_soc_1
    # Inputs:
    P_1 = struct[0].P_1
    Q_1 = struct[0].Q_1
    P_2 = struct[0].P_2
    Q_2 = struct[0].Q_2
    P_3 = struct[0].P_3
    Q_3 = struct[0].Q_3
    v_ref_2 = struct[0].v_ref_2
    v_pss_2 = struct[0].v_pss_2
    p_c_2 = struct[0].p_c_2
    v_ref_3 = struct[0].v_ref_3
    v_pss_3 = struct[0].v_pss_3
    p_c_3 = struct[0].p_c_3
    p_in_1 = struct[0].p_in_1
    Dp_ref_1 = struct[0].Dp_ref_1
    q_ref_1 = struct[0].q_ref_1
    p_src_1 = struct[0].p_src_1
    soc_ref_1 = struct[0].soc_ref_1
    # Dynamical states:
    delta_2 = struct[0].x[0,0]
    omega_2 = struct[0].x[1,0]
    e1q_2 = struct[0].x[2,0]
    e1d_2 = struct[0].x[3,0]
    v_c_2 = struct[0].x[4,0]
    xi_v_2 = struct[0].x[5,0]
    x_gov_1_2 = struct[0].x[6,0]
    x_gov_2_2 = struct[0].x[7,0]
    xi_imw_2 = struct[0].x[8,0]
    delta_3 = struct[0].x[9,0]
    omega_3 = struct[0].x[10,0]
    e1q_3 = struct[0].x[11,0]
    e1d_3 = struct[0].x[12,0]
    v_c_3 = struct[0].x[13,0]
    xi_v_3 = struct[0].x[14,0]
    x_gov_1_3 = struct[0].x[15,0]
    x_gov_2_3 = struct[0].x[16,0]
    xi_imw_3 = struct[0].x[17,0]
    xi_freq = struct[0].x[18,0]
    delta_1 = struct[0].x[19,0]
    omega_v_1 = struct[0].x[20,0]
    x_wo_1 = struct[0].x[21,0]
    i_d_1 = struct[0].x[22,0]
    i_q_1 = struct[0].x[23,0]
    xi_q_1 = struct[0].x[24,0]
    soc_1 = struct[0].x[25,0]
    xi_soc_1 = struct[0].x[26,0]
    # Algebraic states:
    V_1 = struct[0].y_ini[0,0]
    theta_1 = struct[0].y_ini[1,0]
    V_2 = struct[0].y_ini[2,0]
    theta_2 = struct[0].y_ini[3,0]
    V_3 = struct[0].y_ini[4,0]
    theta_3 = struct[0].y_ini[5,0]
    i_d_2 = struct[0].y_ini[6,0]
    i_q_2 = struct[0].y_ini[7,0]
    p_g_2_1 = struct[0].y_ini[8,0]
    q_g_2_1 = struct[0].y_ini[9,0]
    v_f_2 = struct[0].y_ini[10,0]
    p_m_ref_2 = struct[0].y_ini[11,0]
    p_m_2 = struct[0].y_ini[12,0]
    i_d_3 = struct[0].y_ini[13,0]
    i_q_3 = struct[0].y_ini[14,0]
    p_g_3_1 = struct[0].y_ini[15,0]
    q_g_3_1 = struct[0].y_ini[16,0]
    v_f_3 = struct[0].y_ini[17,0]
    p_m_ref_3 = struct[0].y_ini[18,0]
    p_m_3 = struct[0].y_ini[19,0]
    p_r_2 = struct[0].y_ini[20,0]
    p_r_3 = struct[0].y_ini[21,0]
    i_d_ref_1 = struct[0].y_ini[22,0]
    i_q_ref_1 = struct[0].y_ini[23,0]
    p_g_1_1 = struct[0].y_ini[24,0]
    q_g_1_1 = struct[0].y_ini[25,0]
    p_d2_1 = struct[0].y_ini[26,0]
    e_v_1 = struct[0].y_ini[27,0]
    p_sto_1 = struct[0].y_ini[28,0]
    p_m_1 = struct[0].y_ini[29,0]
    omega_coi = struct[0].y_ini[30,0]
    # Differential equations:
    if mode == 2:
        struct[0].f[0,0] = -K_delta_2*delta_2 + Omega_b_2*(omega_2 - omega_coi)
        struct[0].f[1,0] = (-D_2*(omega_2 - omega_coi) - i_d_2*(R_a_2*i_d_2 + V_2*sin(delta_2 - theta_2)) - i_q_2*(R_a_2*i_q_2 + V_2*cos(delta_2 - theta_2)) + p_m_2)/(2*H_2)
        struct[0].f[2,0] = (-e1q_2 - i_d_2*(-X1d_2 + X_d_2) + v_f_2)/T1d0_2
        struct[0].f[3,0] = (-e1d_2 + i_q_2*(-X1q_2 + X_q_2))/T1q0_2
        struct[0].f[4,0] = (V_2 - v_c_2)/T_r_2
        struct[0].f[5,0] = -V_2 + v_ref_2
        struct[0].f[6,0] = (p_m_ref_2 - x_gov_1_2)/T_gov_1_2
        struct[0].f[7,0] = (x_gov_1_2 - x_gov_2_2)/T_gov_3_2
        struct[0].f[8,0] = K_imw_2*(p_c_2 - p_g_2_1) - 1.0e-6*xi_imw_2
        struct[0].f[9,0] = -K_delta_3*delta_3 + Omega_b_3*(omega_3 - omega_coi)
        struct[0].f[10,0] = (-D_3*(omega_3 - omega_coi) - i_d_3*(R_a_3*i_d_3 + V_3*sin(delta_3 - theta_3)) - i_q_3*(R_a_3*i_q_3 + V_3*cos(delta_3 - theta_3)) + p_m_3)/(2*H_3)
        struct[0].f[11,0] = (-e1q_3 - i_d_3*(-X1d_3 + X_d_3) + v_f_3)/T1d0_3
        struct[0].f[12,0] = (-e1d_3 + i_q_3*(-X1q_3 + X_q_3))/T1q0_3
        struct[0].f[13,0] = (V_3 - v_c_3)/T_r_3
        struct[0].f[14,0] = -V_3 + v_ref_3
        struct[0].f[15,0] = (p_m_ref_3 - x_gov_1_3)/T_gov_1_3
        struct[0].f[16,0] = (x_gov_1_3 - x_gov_2_3)/T_gov_3_3
        struct[0].f[17,0] = K_imw_3*(p_c_3 - p_g_3_1) - 1.0e-6*xi_imw_3
        struct[0].f[18,0] = 1 - omega_coi
        struct[0].f[19,0] = D3_1*(-p_g_1_1 + p_m_1) - K_delta_1*delta_1 + Omega_b_1*(-omega_coi + omega_v_1)
        struct[0].f[20,0] = (-D1_1*(omega_v_1 - 1) - p_d2_1 - p_g_1_1 + p_m_1)/(2*H_1)
        struct[0].f[21,0] = (omega_v_1 - x_wo_1 - 1.0)/T_wo_1
        struct[0].f[22,0] = (-i_d_1 + i_d_ref_1)/T_i_1
        struct[0].f[23,0] = (-i_q_1 + i_q_ref_1)/T_i_1
        struct[0].f[24,0] = -q_g_1_1 + q_ref_1
        struct[0].f[25,0] = -p_sto_1/H_s_1
        struct[0].f[26,0] = -soc_1 + soc_ref_1
    # Algebraic equations:
    if mode == 3:
        # Linear part first (Gy_ini @ y_ini); nonlinear rows are then overwritten below.
        struct[0].g[:,:] = np.ascontiguousarray(struct[0].Gy_ini) @ np.ascontiguousarray(struct[0].y_ini)
        struct[0].g[0,0] = -P_1/S_base + V_1**2*g_1_2 + V_1*V_2*(-b_1_2*sin(theta_1 - theta_2) - g_1_2*cos(theta_1 - theta_2)) - S_n_1*p_g_1_1/S_base
        struct[0].g[1,0] = -Q_1/S_base + V_1**2*(-b_1_2 - bs_1_2/2) + V_1*V_2*(b_1_2*cos(theta_1 - theta_2) - g_1_2*sin(theta_1 - theta_2)) - S_n_1*q_g_1_1/S_base
        struct[0].g[2,0] = -P_2/S_base + V_1*V_2*(b_1_2*sin(theta_1 - theta_2) - g_1_2*cos(theta_1 - theta_2)) + V_2**2*(g_1_2 + g_2_3) + V_2*V_3*(-b_2_3*sin(theta_2 - theta_3) - g_2_3*cos(theta_2 - theta_3)) - S_n_2*p_g_2_1/S_base
        struct[0].g[3,0] = -Q_2/S_base + V_1*V_2*(b_1_2*cos(theta_1 - theta_2) + g_1_2*sin(theta_1 - theta_2)) + V_2**2*(-b_1_2 - b_2_3 - bs_1_2/2 - bs_2_3/2) + V_2*V_3*(b_2_3*cos(theta_2 - theta_3) - g_2_3*sin(theta_2 - theta_3)) - S_n_2*q_g_2_1/S_base
        struct[0].g[4,0] = -P_3/S_base + V_2*V_3*(b_2_3*sin(theta_2 - theta_3) - g_2_3*cos(theta_2 - theta_3)) + V_3**2*g_2_3 - S_n_3*p_g_3_1/S_base
        struct[0].g[5,0] = -Q_3/S_base + V_2*V_3*(b_2_3*cos(theta_2 - theta_3) + g_2_3*sin(theta_2 - theta_3)) + V_3**2*(-b_2_3 - bs_2_3/2) - S_n_3*q_g_3_1/S_base
        struct[0].g[6,0] = R_a_2*i_q_2 + V_2*cos(delta_2 - theta_2) + X1d_2*i_d_2 - e1q_2
        struct[0].g[7,0] = R_a_2*i_d_2 + V_2*sin(delta_2 - theta_2) - X1q_2*i_q_2 - e1d_2
        struct[0].g[8,0] = V_2*i_d_2*sin(delta_2 - theta_2) + V_2*i_q_2*cos(delta_2 - theta_2) - p_g_2_1
        struct[0].g[9,0] = V_2*i_d_2*cos(delta_2 - theta_2) - V_2*i_q_2*sin(delta_2 - theta_2) - q_g_2_1
        struct[0].g[10,0] = K_a_2*(-v_c_2 + v_pss_2 + v_ref_2) + K_ai_2*xi_v_2 - v_f_2
        struct[0].g[11,0] = p_c_2 - p_m_ref_2 + p_r_2 + xi_imw_2 - (omega_2 - omega_ref_2)/Droop_2
        struct[0].g[12,0] = T_gov_2_2*(x_gov_1_2 - x_gov_2_2)/T_gov_3_2 - p_m_2 + x_gov_2_2
        struct[0].g[13,0] = R_a_3*i_q_3 + V_3*cos(delta_3 - theta_3) + X1d_3*i_d_3 - e1q_3
        struct[0].g[14,0] = R_a_3*i_d_3 + V_3*sin(delta_3 - theta_3) - X1q_3*i_q_3 - e1d_3
        struct[0].g[15,0] = V_3*i_d_3*sin(delta_3 - theta_3) + V_3*i_q_3*cos(delta_3 - theta_3) - p_g_3_1
        struct[0].g[16,0] = V_3*i_d_3*cos(delta_3 - theta_3) - V_3*i_q_3*sin(delta_3 - theta_3) - q_g_3_1
        struct[0].g[17,0] = K_a_3*(-v_c_3 + v_pss_3 + v_ref_3) + K_ai_3*xi_v_3 - v_f_3
        struct[0].g[18,0] = p_c_3 - p_m_ref_3 + p_r_3 + xi_imw_3 - (omega_3 - omega_ref_3)/Droop_3
        struct[0].g[19,0] = T_gov_2_3*(x_gov_1_3 - x_gov_2_3)/T_gov_3_3 - p_m_3 + x_gov_2_3
        struct[0].g[20,0] = K_sec_2*xi_freq/2 - p_r_2
        struct[0].g[21,0] = K_sec_3*xi_freq/2 - p_r_3
        struct[0].g[22,0] = R_v_1*i_q_ref_1 + V_1*cos(delta_1 - theta_1) + X_v_1*i_d_ref_1 - e_v_1
        struct[0].g[23,0] = R_v_1*i_d_ref_1 + V_1*sin(delta_1 - theta_1) - X_v_1*i_q_ref_1
        struct[0].g[24,0] = V_1*i_d_1*sin(delta_1 - theta_1) + V_1*i_q_1*cos(delta_1 - theta_1) - p_g_1_1
        struct[0].g[25,0] = V_1*i_d_1*cos(delta_1 - theta_1) - V_1*i_q_1*sin(delta_1 - theta_1) - q_g_1_1
        struct[0].g[26,0] = D2_1*(omega_v_1 - x_wo_1 - 1.0) - p_d2_1
        struct[0].g[27,0] = K_q_1*(-q_g_1_1 + q_ref_1 + xi_q_1/T_q_1) - e_v_1
        struct[0].g[28,0] = -i_d_1*(R_s_1*i_d_1 + V_1*sin(delta_1 - theta_1)) - i_q_1*(R_s_1*i_q_1 + V_1*cos(delta_1 - theta_1)) + p_src_1 + p_sto_1
        # Piecewise: SoC control is zeroed while p_sto_1 and soc_1 are inside their limits.
        struct[0].g[29,0] = Dp_ref_1 - p_m_1 + p_src_1 + Piecewise(np.array([(0.0, ((p_sto_1 > 0.0) | (soc_1 > 1.0)) & ((p_sto_1 > 0.0) | (p_sto_1 < 0.0)) & ((soc_1 > 1.0) | (soc_1 < 0.0)) & ((p_sto_1 < 0.0) | (soc_1 < 0.0))), (K_i_soc_1*xi_soc_1 + K_p_soc_1*(-soc_1 + soc_ref_1), True)]))
        struct[0].g[30,0] = omega_2/2 + omega_3/2 - omega_coi
    # Outputs:
    if mode == 3:
        struct[0].h[0,0] = V_1
        struct[0].h[1,0] = V_2
        struct[0].h[2,0] = V_3
        struct[0].h[3,0] = i_d_2*(R_a_2*i_d_2 + V_2*sin(delta_2 - theta_2)) + i_q_2*(R_a_2*i_q_2 + V_2*cos(delta_2 - theta_2))
        struct[0].h[4,0] = i_d_3*(R_a_3*i_d_3 + V_3*sin(delta_3 - theta_3)) + i_q_3*(R_a_3*i_q_3 + V_3*cos(delta_3 - theta_3))
        struct[0].h[5,0] = i_d_1*(R_s_1*i_d_1 + V_1*sin(delta_1 - theta_1)) + i_q_1*(R_s_1*i_q_1 + V_1*cos(delta_1 - theta_1))
        struct[0].h[6,0] = Piecewise(np.array([(0.0, ((p_sto_1 > 0.0) | (soc_1 > 1.0)) & ((p_sto_1 > 0.0) | (p_sto_1 < 0.0)) & ((soc_1 > 1.0) | (soc_1 < 0.0)) & ((p_sto_1 < 0.0) | (soc_1 < 0.0))), (K_i_soc_1*xi_soc_1 + K_p_soc_1*(-soc_1 + soc_ref_1), True)]))
    # Jacobian df/dx (state-dependent entries only):
    if mode == 10:
        struct[0].Fx_ini[0,0] = -K_delta_2
        struct[0].Fx_ini[0,1] = Omega_b_2
        struct[0].Fx_ini[1,0] = (-V_2*i_d_2*cos(delta_2 - theta_2) + V_2*i_q_2*sin(delta_2 - theta_2))/(2*H_2)
        struct[0].Fx_ini[1,1] = -D_2/(2*H_2)
        struct[0].Fx_ini[2,2] = -1/T1d0_2
        struct[0].Fx_ini[3,3] = -1/T1q0_2
        struct[0].Fx_ini[4,4] = -1/T_r_2
        struct[0].Fx_ini[6,6] = -1/T_gov_1_2
        struct[0].Fx_ini[7,6] = 1/T_gov_3_2
        struct[0].Fx_ini[7,7] = -1/T_gov_3_2
        struct[0].Fx_ini[9,9] = -K_delta_3
        struct[0].Fx_ini[9,10] = Omega_b_3
        struct[0].Fx_ini[10,9] = (-V_3*i_d_3*cos(delta_3 - theta_3) + V_3*i_q_3*sin(delta_3 - theta_3))/(2*H_3)
        struct[0].Fx_ini[10,10] = -D_3/(2*H_3)
        struct[0].Fx_ini[11,11] = -1/T1d0_3
        struct[0].Fx_ini[12,12] = -1/T1q0_3
        struct[0].Fx_ini[13,13] = -1/T_r_3
        struct[0].Fx_ini[15,15] = -1/T_gov_1_3
        struct[0].Fx_ini[16,15] = 1/T_gov_3_3
        struct[0].Fx_ini[16,16] = -1/T_gov_3_3
        struct[0].Fx_ini[19,19] = -K_delta_1
        struct[0].Fx_ini[19,20] = Omega_b_1
        struct[0].Fx_ini[20,20] = -D1_1/(2*H_1)
        struct[0].Fx_ini[21,20] = 1/T_wo_1
        struct[0].Fx_ini[21,21] = -1/T_wo_1
        struct[0].Fx_ini[22,22] = -1/T_i_1
        struct[0].Fx_ini[23,23] = -1/T_i_1
    # Jacobians df/dy, dg/dx and dg/dy (state-dependent entries only):
    if mode == 11:
        struct[0].Fy_ini[0,30] = -Omega_b_2
        struct[0].Fy_ini[1,2] = (-i_d_2*sin(delta_2 - theta_2) - i_q_2*cos(delta_2 - theta_2))/(2*H_2)
        struct[0].Fy_ini[1,3] = (V_2*i_d_2*cos(delta_2 - theta_2) - V_2*i_q_2*sin(delta_2 - theta_2))/(2*H_2)
        struct[0].Fy_ini[1,6] = (-2*R_a_2*i_d_2 - V_2*sin(delta_2 - theta_2))/(2*H_2)
        struct[0].Fy_ini[1,7] = (-2*R_a_2*i_q_2 - V_2*cos(delta_2 - theta_2))/(2*H_2)
        struct[0].Fy_ini[1,12] = 1/(2*H_2)
        struct[0].Fy_ini[1,30] = D_2/(2*H_2)
        struct[0].Fy_ini[2,6] = (X1d_2 - X_d_2)/T1d0_2
        struct[0].Fy_ini[2,10] = 1/T1d0_2
        struct[0].Fy_ini[3,7] = (-X1q_2 + X_q_2)/T1q0_2
        struct[0].Fy_ini[4,2] = 1/T_r_2
        struct[0].Fy_ini[5,2] = -1
        struct[0].Fy_ini[6,11] = 1/T_gov_1_2
        struct[0].Fy_ini[8,8] = -K_imw_2
        struct[0].Fy_ini[9,30] = -Omega_b_3
        struct[0].Fy_ini[10,4] = (-i_d_3*sin(delta_3 - theta_3) - i_q_3*cos(delta_3 - theta_3))/(2*H_3)
        struct[0].Fy_ini[10,5] = (V_3*i_d_3*cos(delta_3 - theta_3) - V_3*i_q_3*sin(delta_3 - theta_3))/(2*H_3)
        struct[0].Fy_ini[10,13] = (-2*R_a_3*i_d_3 - V_3*sin(delta_3 - theta_3))/(2*H_3)
        struct[0].Fy_ini[10,14] = (-2*R_a_3*i_q_3 - V_3*cos(delta_3 - theta_3))/(2*H_3)
        struct[0].Fy_ini[10,19] = 1/(2*H_3)
        struct[0].Fy_ini[10,30] = D_3/(2*H_3)
        struct[0].Fy_ini[11,13] = (X1d_3 - X_d_3)/T1d0_3
        struct[0].Fy_ini[11,17] = 1/T1d0_3
        struct[0].Fy_ini[12,14] = (-X1q_3 + X_q_3)/T1q0_3
        struct[0].Fy_ini[13,4] = 1/T_r_3
        struct[0].Fy_ini[14,4] = -1
        struct[0].Fy_ini[15,18] = 1/T_gov_1_3
        struct[0].Fy_ini[17,15] = -K_imw_3
        struct[0].Fy_ini[18,30] = -1
        struct[0].Fy_ini[19,24] = -D3_1
        struct[0].Fy_ini[19,29] = D3_1
        struct[0].Fy_ini[19,30] = -Omega_b_1
        struct[0].Fy_ini[20,24] = -1/(2*H_1)
        struct[0].Fy_ini[20,26] = -1/(2*H_1)
        struct[0].Fy_ini[20,29] = 1/(2*H_1)
        struct[0].Fy_ini[22,22] = 1/T_i_1
        struct[0].Fy_ini[23,23] = 1/T_i_1
        struct[0].Fy_ini[24,25] = -1
        struct[0].Fy_ini[25,28] = -1/H_s_1
        struct[0].Gx_ini[6,0] = -V_2*sin(delta_2 - theta_2)
        struct[0].Gx_ini[6,2] = -1
        struct[0].Gx_ini[7,0] = V_2*cos(delta_2 - theta_2)
        struct[0].Gx_ini[7,3] = -1
        struct[0].Gx_ini[8,0] = V_2*i_d_2*cos(delta_2 - theta_2) - V_2*i_q_2*sin(delta_2 - theta_2)
        struct[0].Gx_ini[9,0] = -V_2*i_d_2*sin(delta_2 - theta_2) - V_2*i_q_2*cos(delta_2 - theta_2)
        struct[0].Gx_ini[10,4] = -K_a_2
        struct[0].Gx_ini[10,5] = K_ai_2
        struct[0].Gx_ini[11,1] = -1/Droop_2
        struct[0].Gx_ini[11,8] = 1
        struct[0].Gx_ini[12,6] = T_gov_2_2/T_gov_3_2
        struct[0].Gx_ini[12,7] = -T_gov_2_2/T_gov_3_2 + 1
        struct[0].Gx_ini[13,9] = -V_3*sin(delta_3 - theta_3)
        struct[0].Gx_ini[13,11] = -1
        struct[0].Gx_ini[14,9] = V_3*cos(delta_3 - theta_3)
        struct[0].Gx_ini[14,12] = -1
        struct[0].Gx_ini[15,9] = V_3*i_d_3*cos(delta_3 - theta_3) - V_3*i_q_3*sin(delta_3 - theta_3)
        struct[0].Gx_ini[16,9] = -V_3*i_d_3*sin(delta_3 - theta_3) - V_3*i_q_3*cos(delta_3 - theta_3)
        struct[0].Gx_ini[17,13] = -K_a_3
        struct[0].Gx_ini[17,14] = K_ai_3
        struct[0].Gx_ini[18,10] = -1/Droop_3
        struct[0].Gx_ini[18,17] = 1
        struct[0].Gx_ini[19,15] = T_gov_2_3/T_gov_3_3
        struct[0].Gx_ini[19,16] = -T_gov_2_3/T_gov_3_3 + 1
        struct[0].Gx_ini[20,18] = K_sec_2/2
        struct[0].Gx_ini[21,18] = K_sec_3/2
        struct[0].Gx_ini[22,19] = -V_1*sin(delta_1 - theta_1)
        struct[0].Gx_ini[23,19] = V_1*cos(delta_1 - theta_1)
        struct[0].Gx_ini[24,19] = V_1*i_d_1*cos(delta_1 - theta_1) - V_1*i_q_1*sin(delta_1 - theta_1)
        struct[0].Gx_ini[24,22] = V_1*sin(delta_1 - theta_1)
        struct[0].Gx_ini[24,23] = V_1*cos(delta_1 - theta_1)
        struct[0].Gx_ini[25,19] = -V_1*i_d_1*sin(delta_1 - theta_1) - V_1*i_q_1*cos(delta_1 - theta_1)
        struct[0].Gx_ini[25,22] = V_1*cos(delta_1 - theta_1)
        struct[0].Gx_ini[25,23] = -V_1*sin(delta_1 - theta_1)
        struct[0].Gx_ini[26,20] = D2_1
        struct[0].Gx_ini[26,21] = -D2_1
        struct[0].Gx_ini[27,24] = K_q_1/T_q_1
        struct[0].Gx_ini[28,19] = -V_1*i_d_1*cos(delta_1 - theta_1) + V_1*i_q_1*sin(delta_1 - theta_1)
        struct[0].Gx_ini[28,22] = -2*R_s_1*i_d_1 - V_1*sin(delta_1 - theta_1)
        struct[0].Gx_ini[28,23] = -2*R_s_1*i_q_1 - V_1*cos(delta_1 - theta_1)
        # Piecewise derivatives match the SoC-control Piecewise in g[29,0].
        struct[0].Gx_ini[29,25] = Piecewise(np.array([(0, ((p_sto_1 > 0.0) | (soc_1 > 1.0)) & ((p_sto_1 > 0.0) | (p_sto_1 < 0.0)) & ((soc_1 > 1.0) | (soc_1 < 0.0)) & ((p_sto_1 < 0.0) | (soc_1 < 0.0))), (-K_p_soc_1, True)]))
        struct[0].Gx_ini[29,26] = Piecewise(np.array([(0, ((p_sto_1 > 0.0) | (soc_1 > 1.0)) & ((p_sto_1 > 0.0) | (p_sto_1 < 0.0)) & ((soc_1 > 1.0) | (soc_1 < 0.0)) & ((p_sto_1 < 0.0) | (soc_1 < 0.0))), (K_i_soc_1, True)]))
        struct[0].Gx_ini[30,1] = 1/2
        struct[0].Gx_ini[30,10] = 1/2
        struct[0].Gy_ini[0,0] = 2*V_1*g_1_2 + V_2*(-b_1_2*sin(theta_1 - theta_2) - g_1_2*cos(theta_1 - theta_2))
        struct[0].Gy_ini[0,1] = V_1*V_2*(-b_1_2*cos(theta_1 - theta_2) + g_1_2*sin(theta_1 - theta_2))
        struct[0].Gy_ini[0,2] = V_1*(-b_1_2*sin(theta_1 - theta_2) - g_1_2*cos(theta_1 - theta_2))
        struct[0].Gy_ini[0,3] = V_1*V_2*(b_1_2*cos(theta_1 - theta_2) - g_1_2*sin(theta_1 - theta_2))
        struct[0].Gy_ini[0,24] = -S_n_1/S_base
        struct[0].Gy_ini[1,0] = 2*V_1*(-b_1_2 - bs_1_2/2) + V_2*(b_1_2*cos(theta_1 - theta_2) - g_1_2*sin(theta_1 - theta_2))
        struct[0].Gy_ini[1,1] = V_1*V_2*(-b_1_2*sin(theta_1 - theta_2) - g_1_2*cos(theta_1 - theta_2))
        struct[0].Gy_ini[1,2] = V_1*(b_1_2*cos(theta_1 - theta_2) - g_1_2*sin(theta_1 - theta_2))
        struct[0].Gy_ini[1,3] = V_1*V_2*(b_1_2*sin(theta_1 - theta_2) + g_1_2*cos(theta_1 - theta_2))
        struct[0].Gy_ini[1,25] = -S_n_1/S_base
        struct[0].Gy_ini[2,0] = V_2*(b_1_2*sin(theta_1 - theta_2) - g_1_2*cos(theta_1 - theta_2))
        struct[0].Gy_ini[2,1] = V_1*V_2*(b_1_2*cos(theta_1 - theta_2) + g_1_2*sin(theta_1 - theta_2))
        struct[0].Gy_ini[2,2] = V_1*(b_1_2*sin(theta_1 - theta_2) - g_1_2*cos(theta_1 - theta_2)) + 2*V_2*(g_1_2 + g_2_3) + V_3*(-b_2_3*sin(theta_2 - theta_3) - g_2_3*cos(theta_2 - theta_3))
        struct[0].Gy_ini[2,3] = V_1*V_2*(-b_1_2*cos(theta_1 - theta_2) - g_1_2*sin(theta_1 - theta_2)) + V_2*V_3*(-b_2_3*cos(theta_2 - theta_3) + g_2_3*sin(theta_2 - theta_3))
        struct[0].Gy_ini[2,4] = V_2*(-b_2_3*sin(theta_2 - theta_3) - g_2_3*cos(theta_2 - theta_3))
        struct[0].Gy_ini[2,5] = V_2*V_3*(b_2_3*cos(theta_2 - theta_3) - g_2_3*sin(theta_2 - theta_3))
        struct[0].Gy_ini[2,8] = -S_n_2/S_base
        struct[0].Gy_ini[3,0] = V_2*(b_1_2*cos(theta_1 - theta_2) + g_1_2*sin(theta_1 - theta_2))
        struct[0].Gy_ini[3,1] = V_1*V_2*(-b_1_2*sin(theta_1 - theta_2) + g_1_2*cos(theta_1 - theta_2))
        struct[0].Gy_ini[3,2] = V_1*(b_1_2*cos(theta_1 - theta_2) + g_1_2*sin(theta_1 - theta_2)) + 2*V_2*(-b_1_2 - b_2_3 - bs_1_2/2 - bs_2_3/2) + V_3*(b_2_3*cos(theta_2 - theta_3) - g_2_3*sin(theta_2 - theta_3))
        struct[0].Gy_ini[3,3] = V_1*V_2*(b_1_2*sin(theta_1 - theta_2) - g_1_2*cos(theta_1 - theta_2)) + V_2*V_3*(-b_2_3*sin(theta_2 - theta_3) - g_2_3*cos(theta_2 - theta_3))
        struct[0].Gy_ini[3,4] = V_2*(b_2_3*cos(theta_2 - theta_3) - g_2_3*sin(theta_2 - theta_3))
        struct[0].Gy_ini[3,5] = V_2*V_3*(b_2_3*sin(theta_2 - theta_3) + g_2_3*cos(theta_2 - theta_3))
        struct[0].Gy_ini[3,9] = -S_n_2/S_base
        struct[0].Gy_ini[4,2] = V_3*(b_2_3*sin(theta_2 - theta_3) - g_2_3*cos(theta_2 - theta_3))
        struct[0].Gy_ini[4,3] = V_2*V_3*(b_2_3*cos(theta_2 - theta_3) + g_2_3*sin(theta_2 - theta_3))
        struct[0].Gy_ini[4,4] = V_2*(b_2_3*sin(theta_2 - theta_3) - g_2_3*cos(theta_2 - theta_3)) + 2*V_3*g_2_3
        struct[0].Gy_ini[4,5] = V_2*V_3*(-b_2_3*cos(theta_2 - theta_3) - g_2_3*sin(theta_2 - theta_3))
        struct[0].Gy_ini[4,15] = -S_n_3/S_base
        struct[0].Gy_ini[5,2] = V_3*(b_2_3*cos(theta_2 - theta_3) + g_2_3*sin(theta_2 - theta_3))
        struct[0].Gy_ini[5,3] = V_2*V_3*(-b_2_3*sin(theta_2 - theta_3) + g_2_3*cos(theta_2 - theta_3))
        struct[0].Gy_ini[5,4] = V_2*(b_2_3*cos(theta_2 - theta_3) + g_2_3*sin(theta_2 - theta_3)) + 2*V_3*(-b_2_3 - bs_2_3/2)
        struct[0].Gy_ini[5,5] = V_2*V_3*(b_2_3*sin(theta_2 - theta_3) - g_2_3*cos(theta_2 - theta_3))
        struct[0].Gy_ini[5,16] = -S_n_3/S_base
        struct[0].Gy_ini[6,2] = cos(delta_2 - theta_2)
        struct[0].Gy_ini[6,3] = V_2*sin(delta_2 - theta_2)
        struct[0].Gy_ini[6,6] = X1d_2
        struct[0].Gy_ini[6,7] = R_a_2
        struct[0].Gy_ini[7,2] = sin(delta_2 - theta_2)
        struct[0].Gy_ini[7,3] = -V_2*cos(delta_2 - theta_2)
        struct[0].Gy_ini[7,6] = R_a_2
        struct[0].Gy_ini[7,7] = -X1q_2
        struct[0].Gy_ini[8,2] = i_d_2*sin(delta_2 - theta_2) + i_q_2*cos(delta_2 - theta_2)
        struct[0].Gy_ini[8,3] = -V_2*i_d_2*cos(delta_2 - theta_2) + V_2*i_q_2*sin(delta_2 - theta_2)
        struct[0].Gy_ini[8,6] = V_2*sin(delta_2 - theta_2)
        struct[0].Gy_ini[8,7] = V_2*cos(delta_2 - theta_2)
        struct[0].Gy_ini[9,2] = i_d_2*cos(delta_2 - theta_2) - i_q_2*sin(delta_2 - theta_2)
        struct[0].Gy_ini[9,3] = V_2*i_d_2*sin(delta_2 - theta_2) + V_2*i_q_2*cos(delta_2 - theta_2)
        struct[0].Gy_ini[9,6] = V_2*cos(delta_2 - theta_2)
        struct[0].Gy_ini[9,7] = -V_2*sin(delta_2 - theta_2)
        struct[0].Gy_ini[13,4] = cos(delta_3 - theta_3)
        struct[0].Gy_ini[13,5] = V_3*sin(delta_3 - theta_3)
        struct[0].Gy_ini[13,13] = X1d_3
        struct[0].Gy_ini[13,14] = R_a_3
        struct[0].Gy_ini[14,4] = sin(delta_3 - theta_3)
        struct[0].Gy_ini[14,5] = -V_3*cos(delta_3 - theta_3)
        struct[0].Gy_ini[14,13] = R_a_3
        struct[0].Gy_ini[14,14] = -X1q_3
        struct[0].Gy_ini[15,4] = i_d_3*sin(delta_3 - theta_3) + i_q_3*cos(delta_3 - theta_3)
        struct[0].Gy_ini[15,5] = -V_3*i_d_3*cos(delta_3 - theta_3) + V_3*i_q_3*sin(delta_3 - theta_3)
        struct[0].Gy_ini[15,13] = V_3*sin(delta_3 - theta_3)
        struct[0].Gy_ini[15,14] = V_3*cos(delta_3 - theta_3)
        struct[0].Gy_ini[16,4] = i_d_3*cos(delta_3 - theta_3) - i_q_3*sin(delta_3 - theta_3)
        struct[0].Gy_ini[16,5] = V_3*i_d_3*sin(delta_3 - theta_3) + V_3*i_q_3*cos(delta_3 - theta_3)
        struct[0].Gy_ini[16,13] = V_3*cos(delta_3 - theta_3)
        struct[0].Gy_ini[16,14] = -V_3*sin(delta_3 - theta_3)
        struct[0].Gy_ini[22,0] = cos(delta_1 - theta_1)
        struct[0].Gy_ini[22,1] = V_1*sin(delta_1 - theta_1)
        struct[0].Gy_ini[22,22] = X_v_1
        struct[0].Gy_ini[22,23] = R_v_1
        struct[0].Gy_ini[23,0] = sin(delta_1 - theta_1)
        struct[0].Gy_ini[23,1] = -V_1*cos(delta_1 - theta_1)
        struct[0].Gy_ini[23,22] = R_v_1
        struct[0].Gy_ini[23,23] = -X_v_1
        struct[0].Gy_ini[24,0] = i_d_1*sin(delta_1 - theta_1) + i_q_1*cos(delta_1 - theta_1)
        struct[0].Gy_ini[24,1] = -V_1*i_d_1*cos(delta_1 - theta_1) + V_1*i_q_1*sin(delta_1 - theta_1)
        struct[0].Gy_ini[25,0] = i_d_1*cos(delta_1 - theta_1) - i_q_1*sin(delta_1 - theta_1)
        struct[0].Gy_ini[25,1] = V_1*i_d_1*sin(delta_1 - theta_1) + V_1*i_q_1*cos(delta_1 - theta_1)
        struct[0].Gy_ini[27,25] = -K_q_1
        struct[0].Gy_ini[28,0] = -i_d_1*sin(delta_1 - theta_1) - i_q_1*cos(delta_1 - theta_1)
        struct[0].Gy_ini[28,1] = V_1*i_d_1*cos(delta_1 - theta_1) - V_1*i_q_1*sin(delta_1 - theta_1)
@numba.njit(cache=True)
def run(t,struct,mode):
# Parameters:
S_base = struct[0].S_base
g_1_2 = struct[0].g_1_2
b_1_2 = struct[0].b_1_2
bs_1_2 = struct[0].bs_1_2
g_2_3 = struct[0].g_2_3
b_2_3 = struct[0].b_2_3
bs_2_3 = struct[0].bs_2_3
U_1_n = struct[0].U_1_n
U_2_n = struct[0].U_2_n
U_3_n = struct[0].U_3_n
S_n_2 = struct[0].S_n_2
H_2 = struct[0].H_2
Omega_b_2 = struct[0].Omega_b_2
T1d0_2 = struct[0].T1d0_2
T1q0_2 = struct[0].T1q0_2
X_d_2 = struct[0].X_d_2
X_q_2 = struct[0].X_q_2
X1d_2 = struct[0].X1d_2
X1q_2 = struct[0].X1q_2
D_2 = struct[0].D_2
R_a_2 = struct[0].R_a_2
K_delta_2 = struct[0].K_delta_2
K_a_2 = struct[0].K_a_2
K_ai_2 = struct[0].K_ai_2
T_r_2 = struct[0].T_r_2
Droop_2 = struct[0].Droop_2
T_gov_1_2 = struct[0].T_gov_1_2
T_gov_2_2 = struct[0].T_gov_2_2
T_gov_3_2 = struct[0].T_gov_3_2
K_imw_2 = struct[0].K_imw_2
omega_ref_2 = struct[0].omega_ref_2
S_n_3 = struct[0].S_n_3
H_3 = struct[0].H_3
Omega_b_3 = struct[0].Omega_b_3
T1d0_3 = struct[0].T1d0_3
T1q0_3 = struct[0].T1q0_3
X_d_3 = struct[0].X_d_3
X_q_3 = struct[0].X_q_3
X1d_3 = struct[0].X1d_3
X1q_3 = struct[0].X1q_3
D_3 = struct[0].D_3
R_a_3 = struct[0].R_a_3
K_delta_3 = struct[0].K_delta_3
K_a_3 = struct[0].K_a_3
K_ai_3 = struct[0].K_ai_3
T_r_3 = struct[0].T_r_3
Droop_3 = struct[0].Droop_3
T_gov_1_3 = struct[0].T_gov_1_3
T_gov_2_3 = struct[0].T_gov_2_3
T_gov_3_3 = struct[0].T_gov_3_3
K_imw_3 = struct[0].K_imw_3
omega_ref_3 = struct[0].omega_ref_3
K_sec_2 = struct[0].K_sec_2
K_sec_3 = struct[0].K_sec_3
S_n_1 = struct[0].S_n_1
R_s_1 = struct[0].R_s_1
H_1 = struct[0].H_1
Omega_b_1 = struct[0].Omega_b_1
R_v_1 = struct[0].R_v_1
X_v_1 = struct[0].X_v_1
D1_1 = struct[0].D1_1
D2_1 = struct[0].D2_1
D3_1 = struct[0].D3_1
K_delta_1 = struct[0].K_delta_1
T_wo_1 = struct[0].T_wo_1
T_i_1 = struct[0].T_i_1
K_q_1 = struct[0].K_q_1
T_q_1 = struct[0].T_q_1
H_s_1 = struct[0].H_s_1
K_p_soc_1 = struct[0].K_p_soc_1
K_i_soc_1 = struct[0].K_i_soc_1
# Inputs:
P_1 = struct[0].P_1
Q_1 = struct[0].Q_1
P_2 = struct[0].P_2
Q_2 = struct[0].Q_2
P_3 = struct[0].P_3
Q_3 = struct[0].Q_3
v_ref_2 = struct[0].v_ref_2
v_pss_2 = struct[0].v_pss_2
p_c_2 = struct[0].p_c_2
v_ref_3 = struct[0].v_ref_3
v_pss_3 = struct[0].v_pss_3
p_c_3 = struct[0].p_c_3
p_in_1 = struct[0].p_in_1
Dp_ref_1 = struct[0].Dp_ref_1
q_ref_1 = struct[0].q_ref_1
p_src_1 = struct[0].p_src_1
soc_ref_1 = struct[0].soc_ref_1
# Dynamical states:
delta_2 = struct[0].x[0,0]
omega_2 = struct[0].x[1,0]
e1q_2 = struct[0].x[2,0]
e1d_2 = struct[0].x[3,0]
v_c_2 = struct[0].x[4,0]
xi_v_2 = struct[0].x[5,0]
x_gov_1_2 = struct[0].x[6,0]
x_gov_2_2 = struct[0].x[7,0]
xi_imw_2 = struct[0].x[8,0]
delta_3 = struct[0].x[9,0]
omega_3 = struct[0].x[10,0]
e1q_3 = struct[0].x[11,0]
e1d_3 = struct[0].x[12,0]
v_c_3 = struct[0].x[13,0]
xi_v_3 = struct[0].x[14,0]
x_gov_1_3 = struct[0].x[15,0]
x_gov_2_3 = struct[0].x[16,0]
xi_imw_3 = struct[0].x[17,0]
xi_freq = struct[0].x[18,0]
delta_1 = struct[0].x[19,0]
omega_v_1 = struct[0].x[20,0]
x_wo_1 = struct[0].x[21,0]
i_d_1 = struct[0].x[22,0]
i_q_1 = struct[0].x[23,0]
xi_q_1 = struct[0].x[24,0]
soc_1 = struct[0].x[25,0]
xi_soc_1 = struct[0].x[26,0]
# Algebraic states:
V_1 = struct[0].y_run[0,0]
theta_1 = struct[0].y_run[1,0]
V_2 = struct[0].y_run[2,0]
theta_2 = struct[0].y_run[3,0]
V_3 = struct[0].y_run[4,0]
theta_3 = struct[0].y_run[5,0]
i_d_2 = struct[0].y_run[6,0]
i_q_2 = struct[0].y_run[7,0]
p_g_2_1 = struct[0].y_run[8,0]
q_g_2_1 = struct[0].y_run[9,0]
v_f_2 = struct[0].y_run[10,0]
p_m_ref_2 = struct[0].y_run[11,0]
p_m_2 = struct[0].y_run[12,0]
i_d_3 = struct[0].y_run[13,0]
i_q_3 = struct[0].y_run[14,0]
p_g_3_1 = struct[0].y_run[15,0]
q_g_3_1 = struct[0].y_run[16,0]
v_f_3 = struct[0].y_run[17,0]
p_m_ref_3 = struct[0].y_run[18,0]
p_m_3 = struct[0].y_run[19,0]
p_r_2 = struct[0].y_run[20,0]
p_r_3 = struct[0].y_run[21,0]
i_d_ref_1 = struct[0].y_run[22,0]
i_q_ref_1 = struct[0].y_run[23,0]
p_g_1_1 = struct[0].y_run[24,0]
q_g_1_1 = struct[0].y_run[25,0]
p_d2_1 = struct[0].y_run[26,0]
e_v_1 = struct[0].y_run[27,0]
p_sto_1 = struct[0].y_run[28,0]
p_m_1 = struct[0].y_run[29,0]
omega_coi = struct[0].y_run[30,0]
struct[0].u_run[0,0] = P_1
struct[0].u_run[1,0] = Q_1
struct[0].u_run[2,0] = P_2
struct[0].u_run[3,0] = Q_2
struct[0].u_run[4,0] = P_3
struct[0].u_run[5,0] = Q_3
struct[0].u_run[6,0] = v_ref_2
struct[0].u_run[7,0] = v_pss_2
struct[0].u_run[8,0] = p_c_2
struct[0].u_run[9,0] = v_ref_3
struct[0].u_run[10,0] = v_pss_3
struct[0].u_run[11,0] = p_c_3
struct[0].u_run[12,0] = p_in_1
struct[0].u_run[13,0] = Dp_ref_1
struct[0].u_run[14,0] = q_ref_1
struct[0].u_run[15,0] = p_src_1
struct[0].u_run[16,0] = soc_ref_1
# Differential equations:
if mode == 2:
struct[0].f[0,0] = -K_delta_2*delta_2 + Omega_b_2*(omega_2 - omega_coi)
struct[0].f[1,0] = (-D_2*(omega_2 - omega_coi) - i_d_2*(R_a_2*i_d_2 + V_2*sin(delta_2 - theta_2)) - i_q_2*(R_a_2*i_q_2 + V_2*cos(delta_2 - theta_2)) + p_m_2)/(2*H_2)
struct[0].f[2,0] = (-e1q_2 - i_d_2*(-X1d_2 + X_d_2) + v_f_2)/T1d0_2
struct[0].f[3,0] = (-e1d_2 + i_q_2*(-X1q_2 + X_q_2))/T1q0_2
struct[0].f[4,0] = (V_2 - v_c_2)/T_r_2
struct[0].f[5,0] = -V_2 + v_ref_2
struct[0].f[6,0] = (p_m_ref_2 - x_gov_1_2)/T_gov_1_2
struct[0].f[7,0] = (x_gov_1_2 - x_gov_2_2)/T_gov_3_2
struct[0].f[8,0] = K_imw_2*(p_c_2 - p_g_2_1) - 1.0e-6*xi_imw_2
struct[0].f[9,0] = -K_delta_3*delta_3 + Omega_b_3*(omega_3 - omega_coi)
struct[0].f[10,0] = (-D_3*(omega_3 - omega_coi) - i_d_3*(R_a_3*i_d_3 + V_3*sin(delta_3 - theta_3)) - i_q_3*(R_a_3*i_q_3 + V_3*cos(delta_3 - theta_3)) + p_m_3)/(2*H_3)
struct[0].f[11,0] = (-e1q_3 - i_d_3*(-X1d_3 + X_d_3) + v_f_3)/T1d0_3
struct[0].f[12,0] = (-e1d_3 + i_q_3*(-X1q_3 + X_q_3))/T1q0_3
struct[0].f[13,0] = (V_3 - v_c_3)/T_r_3
struct[0].f[14,0] = -V_3 + v_ref_3
struct[0].f[15,0] = (p_m_ref_3 - x_gov_1_3)/T_gov_1_3
struct[0].f[16,0] = (x_gov_1_3 - x_gov_2_3)/T_gov_3_3
struct[0].f[17,0] = K_imw_3*(p_c_3 - p_g_3_1) - 1.0e-6*xi_imw_3
struct[0].f[18,0] = 1 - omega_coi
struct[0].f[19,0] = D3_1*(-p_g_1_1 + p_m_1) - K_delta_1*delta_1 + Omega_b_1*(-omega_coi + omega_v_1)
struct[0].f[20,0] = (-D1_1*(omega_v_1 - 1) - p_d2_1 - p_g_1_1 + p_m_1)/(2*H_1)
struct[0].f[21,0] = (omega_v_1 - x_wo_1 - 1.0)/T_wo_1
struct[0].f[22,0] = (-i_d_1 + i_d_ref_1)/T_i_1
struct[0].f[23,0] = (-i_q_1 + i_q_ref_1)/T_i_1
struct[0].f[24,0] = -q_g_1_1 + q_ref_1
struct[0].f[25,0] = -p_sto_1/H_s_1
struct[0].f[26,0] = -soc_1 + soc_ref_1
# Algebraic equations:
if mode == 3:
struct[0].g[:,:] = np.ascontiguousarray(struct[0].Gy) @ np.ascontiguousarray(struct[0].y_run) + np.ascontiguousarray(struct[0].Gu) @ np.ascontiguousarray(struct[0].u_run)
struct[0].g[0,0] = -P_1/S_base + V_1**2*g_1_2 + V_1*V_2*(-b_1_2*sin(theta_1 - theta_2) - g_1_2*cos(theta_1 - theta_2)) - S_n_1*p_g_1_1/S_base
struct[0].g[1,0] = -Q_1/S_base + V_1**2*(-b_1_2 - bs_1_2/2) + V_1*V_2*(b_1_2*cos(theta_1 - theta_2) - g_1_2*sin(theta_1 - theta_2)) - S_n_1*q_g_1_1/S_base
struct[0].g[2,0] = -P_2/S_base + V_1*V_2*(b_1_2*sin(theta_1 - theta_2) - g_1_2*cos(theta_1 - theta_2)) + V_2**2*(g_1_2 + g_2_3) + V_2*V_3*(-b_2_3*sin(theta_2 - theta_3) - g_2_3*cos(theta_2 - theta_3)) - S_n_2*p_g_2_1/S_base
struct[0].g[3,0] = -Q_2/S_base + V_1*V_2*(b_1_2*cos(theta_1 - theta_2) + g_1_2*sin(theta_1 - theta_2)) + V_2**2*(-b_1_2 - b_2_3 - bs_1_2/2 - bs_2_3/2) + V_2*V_3*(b_2_3*cos(theta_2 - theta_3) - g_2_3*sin(theta_2 - theta_3)) - S_n_2*q_g_2_1/S_base
struct[0].g[4,0] = -P_3/S_base + V_2*V_3*(b_2_3*sin(theta_2 - theta_3) - g_2_3*cos(theta_2 - theta_3)) + V_3**2*g_2_3 - S_n_3*p_g_3_1/S_base
struct[0].g[5,0] = -Q_3/S_base + V_2*V_3*(b_2_3*cos(theta_2 - theta_3) + g_2_3*sin(theta_2 - theta_3)) + V_3**2*(-b_2_3 - bs_2_3/2) - S_n_3*q_g_3_1/S_base
struct[0].g[6,0] = R_a_2*i_q_2 + V_2*cos(delta_2 - theta_2) + X1d_2*i_d_2 - e1q_2
struct[0].g[7,0] = R_a_2*i_d_2 + V_2*sin(delta_2 - theta_2) - X1q_2*i_q_2 - e1d_2
struct[0].g[8,0] = V_2*i_d_2*sin(delta_2 - theta_2) + V_2*i_q_2*cos(delta_2 - theta_2) - p_g_2_1
struct[0].g[9,0] = V_2*i_d_2*cos(delta_2 - theta_2) - V_2*i_q_2*sin(delta_2 - theta_2) - q_g_2_1
struct[0].g[10,0] = K_a_2*(-v_c_2 + v_pss_2 + v_ref_2) + K_ai_2*xi_v_2 - v_f_2
struct[0].g[11,0] = p_c_2 - p_m_ref_2 + p_r_2 + xi_imw_2 - (omega_2 - omega_ref_2)/Droop_2
struct[0].g[12,0] = T_gov_2_2*(x_gov_1_2 - x_gov_2_2)/T_gov_3_2 - p_m_2 + x_gov_2_2
struct[0].g[13,0] = R_a_3*i_q_3 + V_3*cos(delta_3 - theta_3) + X1d_3*i_d_3 - e1q_3
struct[0].g[14,0] = R_a_3*i_d_3 + V_3*sin(delta_3 - theta_3) - X1q_3*i_q_3 - e1d_3
struct[0].g[15,0] = V_3*i_d_3*sin(delta_3 - theta_3) + V_3*i_q_3*cos(delta_3 - theta_3) - p_g_3_1
struct[0].g[16,0] = V_3*i_d_3*cos(delta_3 - theta_3) - V_3*i_q_3*sin(delta_3 - theta_3) - q_g_3_1
struct[0].g[17,0] = K_a_3*(-v_c_3 + v_pss_3 + v_ref_3) + K_ai_3*xi_v_3 - v_f_3
struct[0].g[18,0] = p_c_3 - p_m_ref_3 + p_r_3 + xi_imw_3 - (omega_3 - omega_ref_3)/Droop_3
struct[0].g[19,0] = T_gov_2_3*(x_gov_1_3 - x_gov_2_3)/T_gov_3_3 - p_m_3 + x_gov_2_3
struct[0].g[20,0] = K_sec_2*xi_freq/2 - p_r_2
struct[0].g[21,0] = K_sec_3*xi_freq/2 - p_r_3
struct[0].g[22,0] = R_v_1*i_q_ref_1 + V_1*cos(delta_1 - theta_1) + X_v_1*i_d_ref_1 - e_v_1
struct[0].g[23,0] = R_v_1*i_d_ref_1 + V_1*sin(delta_1 - theta_1) - X_v_1*i_q_ref_1
struct[0].g[24,0] = V_1*i_d_1*sin(delta_1 - theta_1) + V_1*i_q_1*cos(delta_1 - theta_1) - p_g_1_1
struct[0].g[25,0] = V_1*i_d_1*cos(delta_1 - theta_1) - V_1*i_q_1*sin(delta_1 - theta_1) - q_g_1_1
struct[0].g[26,0] = D2_1*(omega_v_1 - x_wo_1 - 1.0) - p_d2_1
struct[0].g[27,0] = K_q_1*(-q_g_1_1 + q_ref_1 + xi_q_1/T_q_1) - e_v_1
struct[0].g[28,0] = -i_d_1*(R_s_1*i_d_1 + V_1*sin(delta_1 - theta_1)) - i_q_1*(R_s_1*i_q_1 + V_1*cos(delta_1 - theta_1)) + p_src_1 + p_sto_1
struct[0].g[29,0] = Dp_ref_1 - p_m_1 + p_src_1 + Piecewise(np.array([(0.0, ((p_sto_1 > 0.0) | (soc_1 > 1.0)) & ((p_sto_1 > 0.0) | (p_sto_1 < 0.0)) & ((soc_1 > 1.0) | (soc_1 < 0.0)) & ((p_sto_1 < 0.0) | (soc_1 < 0.0))), (K_i_soc_1*xi_soc_1 + K_p_soc_1*(-soc_1 + soc_ref_1), True)]))
struct[0].g[30,0] = omega_2/2 + omega_3/2 - omega_coi
# Outputs:
if mode == 3:
struct[0].h[0,0] = V_1
struct[0].h[1,0] = V_2
struct[0].h[2,0] = V_3
struct[0].h[3,0] = i_d_2*(R_a_2*i_d_2 + V_2*sin(delta_2 - theta_2)) + i_q_2*(R_a_2*i_q_2 + V_2*cos(delta_2 - theta_2))
struct[0].h[4,0] = i_d_3*(R_a_3*i_d_3 + V_3*sin(delta_3 - theta_3)) + i_q_3*(R_a_3*i_q_3 + V_3*cos(delta_3 - theta_3))
struct[0].h[5,0] = i_d_1*(R_s_1*i_d_1 + V_1*sin(delta_1 - theta_1)) + i_q_1*(R_s_1*i_q_1 + V_1*cos(delta_1 - theta_1))
struct[0].h[6,0] = Piecewise(np.array([(0.0, ((p_sto_1 > 0.0) | (soc_1 > 1.0)) & ((p_sto_1 > 0.0) | (p_sto_1 < 0.0)) & ((soc_1 > 1.0) | (soc_1 < 0.0)) & ((p_sto_1 < 0.0) | (soc_1 < 0.0))), (K_i_soc_1*xi_soc_1 + K_p_soc_1*(-soc_1 + soc_ref_1), True)]))
if mode == 10:
struct[0].Fx[0,0] = -K_delta_2
struct[0].Fx[0,1] = Omega_b_2
struct[0].Fx[1,0] = (-V_2*i_d_2*cos(delta_2 - theta_2) + V_2*i_q_2*sin(delta_2 - theta_2))/(2*H_2)
struct[0].Fx[1,1] = -D_2/(2*H_2)
struct[0].Fx[2,2] = -1/T1d0_2
struct[0].Fx[3,3] = -1/T1q0_2
struct[0].Fx[4,4] = -1/T_r_2
struct[0].Fx[6,6] = -1/T_gov_1_2
struct[0].Fx[7,6] = 1/T_gov_3_2
struct[0].Fx[7,7] = -1/T_gov_3_2
struct[0].Fx[9,9] = -K_delta_3
struct[0].Fx[9,10] = Omega_b_3
struct[0].Fx[10,9] = (-V_3*i_d_3*cos(delta_3 - theta_3) + V_3*i_q_3*sin(delta_3 - theta_3))/(2*H_3)
struct[0].Fx[10,10] = -D_3/(2*H_3)
struct[0].Fx[11,11] = -1/T1d0_3
struct[0].Fx[12,12] = -1/T1q0_3
struct[0].Fx[13,13] = -1/T_r_3
struct[0].Fx[15,15] = -1/T_gov_1_3
struct[0].Fx[16,15] = 1/T_gov_3_3
struct[0].Fx[16,16] = -1/T_gov_3_3
struct[0].Fx[19,19] = -K_delta_1
struct[0].Fx[19,20] = Omega_b_1
struct[0].Fx[20,20] = -D1_1/(2*H_1)
struct[0].Fx[21,20] = 1/T_wo_1
struct[0].Fx[21,21] = -1/T_wo_1
struct[0].Fx[22,22] = -1/T_i_1
struct[0].Fx[23,23] = -1/T_i_1
if mode == 11:
struct[0].Fy[0,30] = -Omega_b_2
struct[0].Fy[1,2] = (-i_d_2*sin(delta_2 - theta_2) - i_q_2*cos(delta_2 - theta_2))/(2*H_2)
struct[0].Fy[1,3] = (V_2*i_d_2*cos(delta_2 - theta_2) - V_2*i_q_2*sin(delta_2 - theta_2))/(2*H_2)
struct[0].Fy[1,6] = (-2*R_a_2*i_d_2 - V_2*sin(delta_2 - theta_2))/(2*H_2)
struct[0].Fy[1,7] = (-2*R_a_2*i_q_2 - V_2*cos(delta_2 - theta_2))/(2*H_2)
struct[0].Fy[1,12] = 1/(2*H_2)
struct[0].Fy[1,30] = D_2/(2*H_2)
struct[0].Fy[2,6] = (X1d_2 - X_d_2)/T1d0_2
struct[0].Fy[2,10] = 1/T1d0_2
struct[0].Fy[3,7] = (-X1q_2 + X_q_2)/T1q0_2
struct[0].Fy[4,2] = 1/T_r_2
struct[0].Fy[5,2] = -1
struct[0].Fy[6,11] = 1/T_gov_1_2
struct[0].Fy[8,8] = -K_imw_2
struct[0].Fy[9,30] = -Omega_b_3
struct[0].Fy[10,4] = (-i_d_3*sin(delta_3 - theta_3) - i_q_3*cos(delta_3 - theta_3))/(2*H_3)
struct[0].Fy[10,5] = (V_3*i_d_3*cos(delta_3 - theta_3) - V_3*i_q_3*sin(delta_3 - theta_3))/(2*H_3)
struct[0].Fy[10,13] = (-2*R_a_3*i_d_3 - V_3*sin(delta_3 - theta_3))/(2*H_3)
struct[0].Fy[10,14] = (-2*R_a_3*i_q_3 - V_3*cos(delta_3 - theta_3))/(2*H_3)
struct[0].Fy[10,19] = 1/(2*H_3)
struct[0].Fy[10,30] = D_3/(2*H_3)
struct[0].Fy[11,13] = (X1d_3 - X_d_3)/T1d0_3
struct[0].Fy[11,17] = 1/T1d0_3
struct[0].Fy[12,14] = (-X1q_3 + X_q_3)/T1q0_3
struct[0].Fy[13,4] = 1/T_r_3
struct[0].Fy[14,4] = -1
struct[0].Fy[15,18] = 1/T_gov_1_3
struct[0].Fy[17,15] = -K_imw_3
struct[0].Fy[18,30] = -1
struct[0].Fy[19,24] = -D3_1
struct[0].Fy[19,29] = D3_1
struct[0].Fy[19,30] = -Omega_b_1
struct[0].Fy[20,24] = -1/(2*H_1)
struct[0].Fy[20,26] = -1/(2*H_1)
struct[0].Fy[20,29] = 1/(2*H_1)
struct[0].Fy[22,22] = 1/T_i_1
struct[0].Fy[23,23] = 1/T_i_1
struct[0].Fy[24,25] = -1
struct[0].Fy[25,28] = -1/H_s_1
struct[0].Gx[6,0] = -V_2*sin(delta_2 - theta_2)
struct[0].Gx[6,2] = -1
struct[0].Gx[7,0] = V_2*cos(delta_2 - theta_2)
struct[0].Gx[7,3] = -1
struct[0].Gx[8,0] = V_2*i_d_2*cos(delta_2 - theta_2) - V_2*i_q_2*sin(delta_2 - theta_2)
struct[0].Gx[9,0] = -V_2*i_d_2*sin(delta_2 - theta_2) - V_2*i_q_2*cos(delta_2 - theta_2)
struct[0].Gx[10,4] = -K_a_2
struct[0].Gx[10,5] = K_ai_2
struct[0].Gx[11,1] = -1/Droop_2
struct[0].Gx[11,8] = 1
struct[0].Gx[12,6] = T_gov_2_2/T_gov_3_2
struct[0].Gx[12,7] = -T_gov_2_2/T_gov_3_2 + 1
struct[0].Gx[13,9] = -V_3*sin(delta_3 - theta_3)
struct[0].Gx[13,11] = -1
struct[0].Gx[14,9] = V_3*cos(delta_3 - theta_3)
struct[0].Gx[14,12] = -1
struct[0].Gx[15,9] = V_3*i_d_3*cos(delta_3 - theta_3) - V_3*i_q_3*sin(delta_3 - theta_3)
struct[0].Gx[16,9] = -V_3*i_d_3*sin(delta_3 - theta_3) - V_3*i_q_3*cos(delta_3 - theta_3)
struct[0].Gx[17,13] = -K_a_3
struct[0].Gx[17,14] = K_ai_3
struct[0].Gx[18,10] = -1/Droop_3
struct[0].Gx[18,17] = 1
struct[0].Gx[19,15] = T_gov_2_3/T_gov_3_3
struct[0].Gx[19,16] = -T_gov_2_3/T_gov_3_3 + 1
struct[0].Gx[20,18] = K_sec_2/2
struct[0].Gx[21,18] = K_sec_3/2
struct[0].Gx[22,19] = -V_1*sin(delta_1 - theta_1)
struct[0].Gx[23,19] = V_1*cos(delta_1 - theta_1)
struct[0].Gx[24,19] = V_1*i_d_1*cos(delta_1 - theta_1) - V_1*i_q_1*sin(delta_1 - theta_1)
struct[0].Gx[24,22] = V_1*sin(delta_1 - theta_1)
struct[0].Gx[24,23] = V_1*cos(delta_1 - theta_1)
struct[0].Gx[25,19] = -V_1*i_d_1*sin(delta_1 - theta_1) - V_1*i_q_1*cos(delta_1 - theta_1)
struct[0].Gx[25,22] = V_1*cos(delta_1 - theta_1)
struct[0].Gx[25,23] = -V_1*sin(delta_1 - theta_1)
struct[0].Gx[26,20] = D2_1
struct[0].Gx[26,21] = -D2_1
struct[0].Gx[27,24] = K_q_1/T_q_1
struct[0].Gx[28,19] = -V_1*i_d_1*cos(delta_1 - theta_1) + V_1*i_q_1*sin(delta_1 - theta_1)
struct[0].Gx[28,22] = -2*R_s_1*i_d_1 - V_1*sin(delta_1 - theta_1)
struct[0].Gx[28,23] = -2*R_s_1*i_q_1 - V_1*cos(delta_1 - theta_1)
struct[0].Gx[29,25] = Piecewise(np.array([(0, ((p_sto_1 > 0.0) | (soc_1 > 1.0)) & ((p_sto_1 > 0.0) | (p_sto_1 < 0.0)) & ((soc_1 > 1.0) | (soc_1 < 0.0)) & ((p_sto_1 < 0.0) | (soc_1 < 0.0))), (-K_p_soc_1, True)]))
struct[0].Gx[29,26] = Piecewise(np.array([(0, ((p_sto_1 > 0.0) | (soc_1 > 1.0)) & ((p_sto_1 > 0.0) | (p_sto_1 < 0.0)) & ((soc_1 > 1.0) | (soc_1 < 0.0)) & ((p_sto_1 < 0.0) | (soc_1 < 0.0))), (K_i_soc_1, True)]))
struct[0].Gx[30,1] = 1/2
struct[0].Gx[30,10] = 1/2
struct[0].Gy[0,0] = 2*V_1*g_1_2 + V_2*(-b_1_2*sin(theta_1 - theta_2) - g_1_2*cos(theta_1 - theta_2))
struct[0].Gy[0,1] = V_1*V_2*(-b_1_2*cos(theta_1 - theta_2) + g_1_2*sin(theta_1 - theta_2))
struct[0].Gy[0,2] = V_1*(-b_1_2*sin(theta_1 - theta_2) - g_1_2*cos(theta_1 - theta_2))
struct[0].Gy[0,3] = V_1*V_2*(b_1_2*cos(theta_1 - theta_2) - g_1_2*sin(theta_1 - theta_2))
struct[0].Gy[0,24] = -S_n_1/S_base
struct[0].Gy[1,0] = 2*V_1*(-b_1_2 - bs_1_2/2) + V_2*(b_1_2*cos(theta_1 - theta_2) - g_1_2*sin(theta_1 - theta_2))
struct[0].Gy[1,1] = V_1*V_2*(-b_1_2*sin(theta_1 - theta_2) - g_1_2*cos(theta_1 - theta_2))
struct[0].Gy[1,2] = V_1*(b_1_2*cos(theta_1 - theta_2) - g_1_2*sin(theta_1 - theta_2))
struct[0].Gy[1,3] = V_1*V_2*(b_1_2*sin(theta_1 - theta_2) + g_1_2*cos(theta_1 - theta_2))
struct[0].Gy[1,25] = -S_n_1/S_base
struct[0].Gy[2,0] = V_2*(b_1_2*sin(theta_1 - theta_2) - g_1_2*cos(theta_1 - theta_2))
struct[0].Gy[2,1] = V_1*V_2*(b_1_2*cos(theta_1 - theta_2) + g_1_2*sin(theta_1 - theta_2))
struct[0].Gy[2,2] = V_1*(b_1_2*sin(theta_1 - theta_2) - g_1_2*cos(theta_1 - theta_2)) + 2*V_2*(g_1_2 + g_2_3) + V_3*(-b_2_3*sin(theta_2 - theta_3) - g_2_3*cos(theta_2 - theta_3))
struct[0].Gy[2,3] = V_1*V_2*(-b_1_2*cos(theta_1 - theta_2) - g_1_2*sin(theta_1 - theta_2)) + V_2*V_3*(-b_2_3*cos(theta_2 - theta_3) + g_2_3*sin(theta_2 - theta_3))
struct[0].Gy[2,4] = V_2*(-b_2_3*sin(theta_2 - theta_3) - g_2_3*cos(theta_2 - theta_3))
struct[0].Gy[2,5] = V_2*V_3*(b_2_3*cos(theta_2 - theta_3) - g_2_3*sin(theta_2 - theta_3))
struct[0].Gy[2,8] = -S_n_2/S_base
struct[0].Gy[3,0] = V_2*(b_1_2*cos(theta_1 - theta_2) + g_1_2*sin(theta_1 - theta_2))
struct[0].Gy[3,1] = V_1*V_2*(-b_1_2*sin(theta_1 - theta_2) + g_1_2*cos(theta_1 - theta_2))
struct[0].Gy[3,2] = V_1*(b_1_2*cos(theta_1 - theta_2) + g_1_2*sin(theta_1 - theta_2)) + 2*V_2*(-b_1_2 - b_2_3 - bs_1_2/2 - bs_2_3/2) + V_3*(b_2_3*cos(theta_2 - theta_3) - g_2_3*sin(theta_2 - theta_3))
struct[0].Gy[3,3] = V_1*V_2*(b_1_2*sin(theta_1 - theta_2) - g_1_2*cos(theta_1 - theta_2)) + V_2*V_3*(-b_2_3*sin(theta_2 - theta_3) - g_2_3*cos(theta_2 - theta_3))
struct[0].Gy[3,4] = V_2*(b_2_3*cos(theta_2 - theta_3) - g_2_3*sin(theta_2 - theta_3))
struct[0].Gy[3,5] = V_2*V_3*(b_2_3*sin(theta_2 - theta_3) + g_2_3*cos(theta_2 - theta_3))
struct[0].Gy[3,9] = -S_n_2/S_base
struct[0].Gy[4,2] = V_3*(b_2_3*sin(theta_2 - theta_3) - g_2_3*cos(theta_2 - theta_3))
struct[0].Gy[4,3] = V_2*V_3*(b_2_3*cos(theta_2 - theta_3) + g_2_3*sin(theta_2 - theta_3))
struct[0].Gy[4,4] = V_2*(b_2_3*sin(theta_2 - theta_3) - g_2_3*cos(theta_2 - theta_3)) + 2*V_3*g_2_3
struct[0].Gy[4,5] = V_2*V_3*(-b_2_3*cos(theta_2 - theta_3) - g_2_3*sin(theta_2 - theta_3))
struct[0].Gy[4,15] = -S_n_3/S_base
struct[0].Gy[5,2] = V_3*(b_2_3*cos(theta_2 - theta_3) + g_2_3*sin(theta_2 - theta_3))
struct[0].Gy[5,3] = V_2*V_3*(-b_2_3*sin(theta_2 - theta_3) + g_2_3*cos(theta_2 - theta_3))
struct[0].Gy[5,4] = V_2*(b_2_3*cos(theta_2 - theta_3) + g_2_3*sin(theta_2 - theta_3)) + 2*V_3*(-b_2_3 - bs_2_3/2)
struct[0].Gy[5,5] = V_2*V_3*(b_2_3*sin(theta_2 - theta_3) - g_2_3*cos(theta_2 - theta_3))
struct[0].Gy[5,16] = -S_n_3/S_base
struct[0].Gy[6,2] = cos(delta_2 - theta_2)
struct[0].Gy[6,3] = V_2*sin(delta_2 - theta_2)
struct[0].Gy[6,6] = X1d_2
struct[0].Gy[6,7] = R_a_2
struct[0].Gy[7,2] = sin(delta_2 - theta_2)
struct[0].Gy[7,3] = -V_2*cos(delta_2 - theta_2)
struct[0].Gy[7,6] = R_a_2
struct[0].Gy[7,7] = -X1q_2
struct[0].Gy[8,2] = i_d_2*sin(delta_2 - theta_2) + i_q_2*cos(delta_2 - theta_2)
struct[0].Gy[8,3] = -V_2*i_d_2*cos(delta_2 - theta_2) + V_2*i_q_2*sin(delta_2 - theta_2)
struct[0].Gy[8,6] = V_2*sin(delta_2 - theta_2)
struct[0].Gy[8,7] = V_2*cos(delta_2 - theta_2)
struct[0].Gy[9,2] = i_d_2*cos(delta_2 - theta_2) - i_q_2*sin(delta_2 - theta_2)
struct[0].Gy[9,3] = V_2*i_d_2*sin(delta_2 - theta_2) + V_2*i_q_2*cos(delta_2 - theta_2)
struct[0].Gy[9,6] = V_2*cos(delta_2 - theta_2)
struct[0].Gy[9,7] = -V_2*sin(delta_2 - theta_2)
struct[0].Gy[13,4] = cos(delta_3 - theta_3)
struct[0].Gy[13,5] = V_3*sin(delta_3 - theta_3)
struct[0].Gy[13,13] = X1d_3
struct[0].Gy[13,14] = R_a_3
struct[0].Gy[14,4] = sin(delta_3 - theta_3)
struct[0].Gy[14,5] = -V_3*cos(delta_3 - theta_3)
struct[0].Gy[14,13] = R_a_3
struct[0].Gy[14,14] = -X1q_3
struct[0].Gy[15,4] = i_d_3*sin(delta_3 - theta_3) + i_q_3*cos(delta_3 - theta_3)
struct[0].Gy[15,5] = -V_3*i_d_3*cos(delta_3 - theta_3) + V_3*i_q_3*sin(delta_3 - theta_3)
struct[0].Gy[15,13] = V_3*sin(delta_3 - theta_3)
struct[0].Gy[15,14] = V_3*cos(delta_3 - theta_3)
struct[0].Gy[16,4] = i_d_3*cos(delta_3 - theta_3) - i_q_3*sin(delta_3 - theta_3)
struct[0].Gy[16,5] = V_3*i_d_3*sin(delta_3 - theta_3) + V_3*i_q_3*cos(delta_3 - theta_3)
struct[0].Gy[16,13] = V_3*cos(delta_3 - theta_3)
struct[0].Gy[16,14] = -V_3*sin(delta_3 - theta_3)
struct[0].Gy[22,0] = cos(delta_1 - theta_1)
struct[0].Gy[22,1] = V_1*sin(delta_1 - theta_1)
struct[0].Gy[22,22] = X_v_1
struct[0].Gy[22,23] = R_v_1
struct[0].Gy[23,0] = sin(delta_1 - theta_1)
struct[0].Gy[23,1] = -V_1*cos(delta_1 - theta_1)
struct[0].Gy[23,22] = R_v_1
struct[0].Gy[23,23] = -X_v_1
struct[0].Gy[24,0] = i_d_1*sin(delta_1 - theta_1) + i_q_1*cos(delta_1 - theta_1)
struct[0].Gy[24,1] = -V_1*i_d_1*cos(delta_1 - theta_1) + V_1*i_q_1*sin(delta_1 - theta_1)
struct[0].Gy[25,0] = i_d_1*cos(delta_1 - theta_1) - i_q_1*sin(delta_1 - theta_1)
struct[0].Gy[25,1] = V_1*i_d_1*sin(delta_1 - theta_1) + V_1*i_q_1*cos(delta_1 - theta_1)
struct[0].Gy[27,25] = -K_q_1
struct[0].Gy[28,0] = -i_d_1*sin(delta_1 - theta_1) - i_q_1*cos(delta_1 - theta_1)
struct[0].Gy[28,1] = V_1*i_d_1*cos(delta_1 - theta_1) - V_1*i_q_1*sin(delta_1 - theta_1)
if mode > 12:
struct[0].Fu[5,6] = 1
struct[0].Fu[8,8] = K_imw_2
struct[0].Fu[14,9] = 1
struct[0].Fu[17,11] = K_imw_3
struct[0].Fu[24,14] = 1
struct[0].Fu[26,16] = 1
struct[0].Gu[0,0] = -1/S_base
struct[0].Gu[1,1] = -1/S_base
struct[0].Gu[2,2] = -1/S_base
struct[0].Gu[3,3] = -1/S_base
struct[0].Gu[4,4] = -1/S_base
struct[0].Gu[5,5] = -1/S_base
struct[0].Gu[10,6] = K_a_2
struct[0].Gu[10,7] = K_a_2
struct[0].Gu[17,9] = K_a_3
struct[0].Gu[17,10] = K_a_3
struct[0].Gu[27,14] = K_q_1
struct[0].Gu[29,16] = Piecewise(np.array([(0, ((p_sto_1 > 0.0) | (soc_1 > 1.0)) & ((p_sto_1 > 0.0) | (p_sto_1 < 0.0)) & ((soc_1 > 1.0) | (soc_1 < 0.0)) & ((p_sto_1 < 0.0) | (soc_1 < 0.0))), (K_p_soc_1, True)]))
struct[0].Hx[3,0] = V_2*i_d_2*cos(delta_2 - theta_2) - V_2*i_q_2*sin(delta_2 - theta_2)
struct[0].Hx[4,9] = V_3*i_d_3*cos(delta_3 - theta_3) - V_3*i_q_3*sin(delta_3 - theta_3)
struct[0].Hx[5,19] = V_1*i_d_1*cos(delta_1 - theta_1) - V_1*i_q_1*sin(delta_1 - theta_1)
struct[0].Hx[5,22] = 2*R_s_1*i_d_1 + V_1*sin(delta_1 - theta_1)
struct[0].Hx[5,23] = 2*R_s_1*i_q_1 + V_1*cos(delta_1 - theta_1)
struct[0].Hx[6,25] = Piecewise(np.array([(0, ((p_sto_1 > 0.0) | (soc_1 > 1.0)) & ((p_sto_1 > 0.0) | (p_sto_1 < 0.0)) & ((soc_1 > 1.0) | (soc_1 < 0.0)) & ((p_sto_1 < 0.0) | (soc_1 < 0.0))), (-K_p_soc_1, True)]))
struct[0].Hx[6,26] = Piecewise(np.array([(0, ((p_sto_1 > 0.0) | (soc_1 > 1.0)) & ((p_sto_1 > 0.0) | (p_sto_1 < 0.0)) & ((soc_1 > 1.0) | (soc_1 < 0.0)) & ((p_sto_1 < 0.0) | (soc_1 < 0.0))), (K_i_soc_1, True)]))
struct[0].Hy[0,0] = 1
struct[0].Hy[1,2] = 1
struct[0].Hy[2,4] = 1
struct[0].Hy[3,2] = i_d_2*sin(delta_2 - theta_2) + i_q_2*cos(delta_2 - theta_2)
struct[0].Hy[3,3] = -V_2*i_d_2*cos(delta_2 - theta_2) + V_2*i_q_2*sin(delta_2 - theta_2)
struct[0].Hy[3,6] = 2*R_a_2*i_d_2 + V_2*sin(delta_2 - theta_2)
struct[0].Hy[3,7] = 2*R_a_2*i_q_2 + V_2*cos(delta_2 - theta_2)
struct[0].Hy[4,4] = i_d_3*sin(delta_3 - theta_3) + i_q_3*cos(delta_3 - theta_3)
struct[0].Hy[4,5] = -V_3*i_d_3*cos(delta_3 - theta_3) + V_3*i_q_3*sin(delta_3 - theta_3)
struct[0].Hy[4,13] = 2*R_a_3*i_d_3 + V_3*sin(delta_3 - theta_3)
struct[0].Hy[4,14] = 2*R_a_3*i_q_3 + V_3*cos(delta_3 - theta_3)
struct[0].Hy[5,0] = i_d_1*sin(delta_1 - theta_1) + i_q_1*cos(delta_1 - theta_1)
struct[0].Hy[5,1] = -V_1*i_d_1*cos(delta_1 - theta_1) + V_1*i_q_1*sin(delta_1 - theta_1)
struct[0].Hu[6,16] = Piecewise(np.array([(0, ((p_sto_1 > 0.0) | (soc_1 > 1.0)) & ((p_sto_1 > 0.0) | (p_sto_1 < 0.0)) & ((soc_1 > 1.0) | (soc_1 < 0.0)) & ((p_sto_1 < 0.0) | (soc_1 < 0.0))), (K_p_soc_1, True)]))
def ini_nn(struct,mode):
    """Evaluate the DAE model for initialization (steady-state solving).

    Auto-generated (pydae-style) numerical evaluation of a 3-bus power
    system: two synchronous machines (buses 2, 3 with AVR + governor +
    secondary frequency control) and one virtual-synchronous-machine /
    storage unit (bus 1). Unlike the runtime variant, algebraic states are
    read from ``struct[0].y_ini`` and Jacobians are written to the
    ``*_ini`` matrices, which include the constant diagonal entries
    (e.g. ``Gy_ini[k,k] = -1``) needed for a complete Newton iteration
    matrix during initialization.

    Parameters
    ----------
    struct : numpy structured array (record) holding parameters, inputs,
        states ``x``, algebraic states ``y_ini`` and output/Jacobian
        storage. Only element ``struct[0]`` is used.
    mode : int selector for what gets computed:
        2  -> differential residuals  f(x, y)
        3  -> algebraic residuals g(x, y) and outputs h(x, y)
        10 -> Fx_ini = df/dx
        11 -> Fy_ini = df/dy and Gy_ini = dg/dy
        (other modes fall through; nothing is written)

    Returns
    -------
    None. Results are written in place into ``struct[0]``.
    """
    # Parameters:
    # Network: base power, line admittances (g/b) and shunt susceptances (bs).
    S_base = struct[0].S_base
    g_1_2 = struct[0].g_1_2
    b_1_2 = struct[0].b_1_2
    bs_1_2 = struct[0].bs_1_2
    g_2_3 = struct[0].g_2_3
    b_2_3 = struct[0].b_2_3
    bs_2_3 = struct[0].bs_2_3
    # Nominal bus voltages (loaded but not referenced in this function).
    U_1_n = struct[0].U_1_n
    U_2_n = struct[0].U_2_n
    U_3_n = struct[0].U_3_n
    # Synchronous machine at bus 2: ratings, 4th-order model constants,
    # AVR (K_a, K_ai, T_r), governor (Droop, T_gov_*) and power controller.
    S_n_2 = struct[0].S_n_2
    H_2 = struct[0].H_2
    Omega_b_2 = struct[0].Omega_b_2
    T1d0_2 = struct[0].T1d0_2
    T1q0_2 = struct[0].T1q0_2
    X_d_2 = struct[0].X_d_2
    X_q_2 = struct[0].X_q_2
    X1d_2 = struct[0].X1d_2
    X1q_2 = struct[0].X1q_2
    D_2 = struct[0].D_2
    R_a_2 = struct[0].R_a_2
    K_delta_2 = struct[0].K_delta_2
    K_a_2 = struct[0].K_a_2
    K_ai_2 = struct[0].K_ai_2
    T_r_2 = struct[0].T_r_2
    Droop_2 = struct[0].Droop_2
    T_gov_1_2 = struct[0].T_gov_1_2
    T_gov_2_2 = struct[0].T_gov_2_2
    T_gov_3_2 = struct[0].T_gov_3_2
    K_imw_2 = struct[0].K_imw_2
    omega_ref_2 = struct[0].omega_ref_2
    # Synchronous machine at bus 3: same parameter set as machine 2.
    S_n_3 = struct[0].S_n_3
    H_3 = struct[0].H_3
    Omega_b_3 = struct[0].Omega_b_3
    T1d0_3 = struct[0].T1d0_3
    T1q0_3 = struct[0].T1q0_3
    X_d_3 = struct[0].X_d_3
    X_q_3 = struct[0].X_q_3
    X1d_3 = struct[0].X1d_3
    X1q_3 = struct[0].X1q_3
    D_3 = struct[0].D_3
    R_a_3 = struct[0].R_a_3
    K_delta_3 = struct[0].K_delta_3
    K_a_3 = struct[0].K_a_3
    K_ai_3 = struct[0].K_ai_3
    T_r_3 = struct[0].T_r_3
    Droop_3 = struct[0].Droop_3
    T_gov_1_3 = struct[0].T_gov_1_3
    T_gov_2_3 = struct[0].T_gov_2_3
    T_gov_3_3 = struct[0].T_gov_3_3
    K_imw_3 = struct[0].K_imw_3
    omega_ref_3 = struct[0].omega_ref_3
    # Secondary (AGC) frequency-control gains.
    K_sec_2 = struct[0].K_sec_2
    K_sec_3 = struct[0].K_sec_3
    # Virtual synchronous machine / storage at bus 1: virtual impedance
    # (R_v, X_v), damping terms D1..D3, washout, current-loop and reactive
    # controllers, storage inertia H_s and SOC PI gains.
    S_n_1 = struct[0].S_n_1
    R_s_1 = struct[0].R_s_1
    H_1 = struct[0].H_1
    Omega_b_1 = struct[0].Omega_b_1
    R_v_1 = struct[0].R_v_1
    X_v_1 = struct[0].X_v_1
    D1_1 = struct[0].D1_1
    D2_1 = struct[0].D2_1
    D3_1 = struct[0].D3_1
    K_delta_1 = struct[0].K_delta_1
    T_wo_1 = struct[0].T_wo_1
    T_i_1 = struct[0].T_i_1
    K_q_1 = struct[0].K_q_1
    T_q_1 = struct[0].T_q_1
    H_s_1 = struct[0].H_s_1
    K_p_soc_1 = struct[0].K_p_soc_1
    K_i_soc_1 = struct[0].K_i_soc_1
    # Inputs:
    # Bus power injections and controller setpoints.
    P_1 = struct[0].P_1
    Q_1 = struct[0].Q_1
    P_2 = struct[0].P_2
    Q_2 = struct[0].Q_2
    P_3 = struct[0].P_3
    Q_3 = struct[0].Q_3
    v_ref_2 = struct[0].v_ref_2
    v_pss_2 = struct[0].v_pss_2
    p_c_2 = struct[0].p_c_2
    v_ref_3 = struct[0].v_ref_3
    v_pss_3 = struct[0].v_pss_3
    p_c_3 = struct[0].p_c_3
    p_in_1 = struct[0].p_in_1
    Dp_ref_1 = struct[0].Dp_ref_1
    q_ref_1 = struct[0].q_ref_1
    p_src_1 = struct[0].p_src_1
    soc_ref_1 = struct[0].soc_ref_1
    # Dynamical states:
    # x[0..8]  machine 2, x[9..17] machine 3, x[18] AGC integrator,
    # x[19..26] VSM/storage unit at bus 1.
    delta_2 = struct[0].x[0,0]
    omega_2 = struct[0].x[1,0]
    e1q_2 = struct[0].x[2,0]
    e1d_2 = struct[0].x[3,0]
    v_c_2 = struct[0].x[4,0]
    xi_v_2 = struct[0].x[5,0]
    x_gov_1_2 = struct[0].x[6,0]
    x_gov_2_2 = struct[0].x[7,0]
    xi_imw_2 = struct[0].x[8,0]
    delta_3 = struct[0].x[9,0]
    omega_3 = struct[0].x[10,0]
    e1q_3 = struct[0].x[11,0]
    e1d_3 = struct[0].x[12,0]
    v_c_3 = struct[0].x[13,0]
    xi_v_3 = struct[0].x[14,0]
    x_gov_1_3 = struct[0].x[15,0]
    x_gov_2_3 = struct[0].x[16,0]
    xi_imw_3 = struct[0].x[17,0]
    xi_freq = struct[0].x[18,0]
    delta_1 = struct[0].x[19,0]
    omega_v_1 = struct[0].x[20,0]
    x_wo_1 = struct[0].x[21,0]
    i_d_1 = struct[0].x[22,0]
    i_q_1 = struct[0].x[23,0]
    xi_q_1 = struct[0].x[24,0]
    soc_1 = struct[0].x[25,0]
    xi_soc_1 = struct[0].x[26,0]
    # Algebraic states:
    # Read from y_ini (initialization guess/iterate), not the runtime y.
    V_1 = struct[0].y_ini[0,0]
    theta_1 = struct[0].y_ini[1,0]
    V_2 = struct[0].y_ini[2,0]
    theta_2 = struct[0].y_ini[3,0]
    V_3 = struct[0].y_ini[4,0]
    theta_3 = struct[0].y_ini[5,0]
    i_d_2 = struct[0].y_ini[6,0]
    i_q_2 = struct[0].y_ini[7,0]
    p_g_2_1 = struct[0].y_ini[8,0]
    q_g_2_1 = struct[0].y_ini[9,0]
    v_f_2 = struct[0].y_ini[10,0]
    p_m_ref_2 = struct[0].y_ini[11,0]
    p_m_2 = struct[0].y_ini[12,0]
    i_d_3 = struct[0].y_ini[13,0]
    i_q_3 = struct[0].y_ini[14,0]
    p_g_3_1 = struct[0].y_ini[15,0]
    q_g_3_1 = struct[0].y_ini[16,0]
    v_f_3 = struct[0].y_ini[17,0]
    p_m_ref_3 = struct[0].y_ini[18,0]
    p_m_3 = struct[0].y_ini[19,0]
    p_r_2 = struct[0].y_ini[20,0]
    p_r_3 = struct[0].y_ini[21,0]
    i_d_ref_1 = struct[0].y_ini[22,0]
    i_q_ref_1 = struct[0].y_ini[23,0]
    p_g_1_1 = struct[0].y_ini[24,0]
    q_g_1_1 = struct[0].y_ini[25,0]
    p_d2_1 = struct[0].y_ini[26,0]
    e_v_1 = struct[0].y_ini[27,0]
    p_sto_1 = struct[0].y_ini[28,0]
    p_m_1 = struct[0].y_ini[29,0]
    omega_coi = struct[0].y_ini[30,0]
    # Differential equations:
    if mode == 2:
        # Machine 2: rotor angle (with K_delta anchoring term), swing
        # equation, transient EMFs, voltage measurement filter, AVR
        # integrator, two-stage governor and the slow (1e-6 leak)
        # power-mismatch integrator.
        struct[0].f[0,0] = -K_delta_2*delta_2 + Omega_b_2*(omega_2 - omega_coi)
        struct[0].f[1,0] = (-D_2*(omega_2 - omega_coi) - i_d_2*(R_a_2*i_d_2 + V_2*sin(delta_2 - theta_2)) - i_q_2*(R_a_2*i_q_2 + V_2*cos(delta_2 - theta_2)) + p_m_2)/(2*H_2)
        struct[0].f[2,0] = (-e1q_2 - i_d_2*(-X1d_2 + X_d_2) + v_f_2)/T1d0_2
        struct[0].f[3,0] = (-e1d_2 + i_q_2*(-X1q_2 + X_q_2))/T1q0_2
        struct[0].f[4,0] = (V_2 - v_c_2)/T_r_2
        struct[0].f[5,0] = -V_2 + v_ref_2
        struct[0].f[6,0] = (p_m_ref_2 - x_gov_1_2)/T_gov_1_2
        struct[0].f[7,0] = (x_gov_1_2 - x_gov_2_2)/T_gov_3_2
        struct[0].f[8,0] = K_imw_2*(p_c_2 - p_g_2_1) - 1.0e-6*xi_imw_2
        # Machine 3: same structure as machine 2.
        struct[0].f[9,0] = -K_delta_3*delta_3 + Omega_b_3*(omega_3 - omega_coi)
        struct[0].f[10,0] = (-D_3*(omega_3 - omega_coi) - i_d_3*(R_a_3*i_d_3 + V_3*sin(delta_3 - theta_3)) - i_q_3*(R_a_3*i_q_3 + V_3*cos(delta_3 - theta_3)) + p_m_3)/(2*H_3)
        struct[0].f[11,0] = (-e1q_3 - i_d_3*(-X1d_3 + X_d_3) + v_f_3)/T1d0_3
        struct[0].f[12,0] = (-e1d_3 + i_q_3*(-X1q_3 + X_q_3))/T1q0_3
        struct[0].f[13,0] = (V_3 - v_c_3)/T_r_3
        struct[0].f[14,0] = -V_3 + v_ref_3
        struct[0].f[15,0] = (p_m_ref_3 - x_gov_1_3)/T_gov_1_3
        struct[0].f[16,0] = (x_gov_1_3 - x_gov_2_3)/T_gov_3_3
        struct[0].f[17,0] = K_imw_3*(p_c_3 - p_g_3_1) - 1.0e-6*xi_imw_3
        # Secondary-control frequency-deviation integrator (COI frequency).
        struct[0].f[18,0] = 1 - omega_coi
        # VSM/storage at bus 1: virtual rotor angle (with D3 power damping),
        # virtual swing, washout, d/q current loops, reactive-power
        # integrator, SOC dynamics and SOC-error integrator.
        struct[0].f[19,0] = D3_1*(-p_g_1_1 + p_m_1) - K_delta_1*delta_1 + Omega_b_1*(-omega_coi + omega_v_1)
        struct[0].f[20,0] = (-D1_1*(omega_v_1 - 1) - p_d2_1 - p_g_1_1 + p_m_1)/(2*H_1)
        struct[0].f[21,0] = (omega_v_1 - x_wo_1 - 1.0)/T_wo_1
        struct[0].f[22,0] = (-i_d_1 + i_d_ref_1)/T_i_1
        struct[0].f[23,0] = (-i_q_1 + i_q_ref_1)/T_i_1
        struct[0].f[24,0] = -q_g_1_1 + q_ref_1
        struct[0].f[25,0] = -p_sto_1/H_s_1
        struct[0].f[26,0] = -soc_1 + soc_ref_1
    # Algebraic equations:
    if mode == 3:
        # g[0..5]: power-flow mismatch at buses 1-3 (P then Q), with machine
        # injections rescaled from machine base S_n to system base S_base.
        struct[0].g[0,0] = -P_1/S_base + V_1**2*g_1_2 + V_1*V_2*(-b_1_2*sin(theta_1 - theta_2) - g_1_2*cos(theta_1 - theta_2)) - S_n_1*p_g_1_1/S_base
        struct[0].g[1,0] = -Q_1/S_base + V_1**2*(-b_1_2 - bs_1_2/2) + V_1*V_2*(b_1_2*cos(theta_1 - theta_2) - g_1_2*sin(theta_1 - theta_2)) - S_n_1*q_g_1_1/S_base
        struct[0].g[2,0] = -P_2/S_base + V_1*V_2*(b_1_2*sin(theta_1 - theta_2) - g_1_2*cos(theta_1 - theta_2)) + V_2**2*(g_1_2 + g_2_3) + V_2*V_3*(-b_2_3*sin(theta_2 - theta_3) - g_2_3*cos(theta_2 - theta_3)) - S_n_2*p_g_2_1/S_base
        struct[0].g[3,0] = -Q_2/S_base + V_1*V_2*(b_1_2*cos(theta_1 - theta_2) + g_1_2*sin(theta_1 - theta_2)) + V_2**2*(-b_1_2 - b_2_3 - bs_1_2/2 - bs_2_3/2) + V_2*V_3*(b_2_3*cos(theta_2 - theta_3) - g_2_3*sin(theta_2 - theta_3)) - S_n_2*q_g_2_1/S_base
        struct[0].g[4,0] = -P_3/S_base + V_2*V_3*(b_2_3*sin(theta_2 - theta_3) - g_2_3*cos(theta_2 - theta_3)) + V_3**2*g_2_3 - S_n_3*p_g_3_1/S_base
        struct[0].g[5,0] = -Q_3/S_base + V_2*V_3*(b_2_3*cos(theta_2 - theta_3) + g_2_3*sin(theta_2 - theta_3)) + V_3**2*(-b_2_3 - bs_2_3/2) - S_n_3*q_g_3_1/S_base
        # g[6..12]: machine 2 stator d/q equations, P/Q injections, AVR
        # output, governor reference (droop + secondary + mismatch
        # integrator) and mechanical power.
        struct[0].g[6,0] = R_a_2*i_q_2 + V_2*cos(delta_2 - theta_2) + X1d_2*i_d_2 - e1q_2
        struct[0].g[7,0] = R_a_2*i_d_2 + V_2*sin(delta_2 - theta_2) - X1q_2*i_q_2 - e1d_2
        struct[0].g[8,0] = V_2*i_d_2*sin(delta_2 - theta_2) + V_2*i_q_2*cos(delta_2 - theta_2) - p_g_2_1
        struct[0].g[9,0] = V_2*i_d_2*cos(delta_2 - theta_2) - V_2*i_q_2*sin(delta_2 - theta_2) - q_g_2_1
        struct[0].g[10,0] = K_a_2*(-v_c_2 + v_pss_2 + v_ref_2) + K_ai_2*xi_v_2 - v_f_2
        struct[0].g[11,0] = p_c_2 - p_m_ref_2 + p_r_2 + xi_imw_2 - (omega_2 - omega_ref_2)/Droop_2
        struct[0].g[12,0] = T_gov_2_2*(x_gov_1_2 - x_gov_2_2)/T_gov_3_2 - p_m_2 + x_gov_2_2
        # g[13..19]: machine 3, same structure as machine 2.
        struct[0].g[13,0] = R_a_3*i_q_3 + V_3*cos(delta_3 - theta_3) + X1d_3*i_d_3 - e1q_3
        struct[0].g[14,0] = R_a_3*i_d_3 + V_3*sin(delta_3 - theta_3) - X1q_3*i_q_3 - e1d_3
        struct[0].g[15,0] = V_3*i_d_3*sin(delta_3 - theta_3) + V_3*i_q_3*cos(delta_3 - theta_3) - p_g_3_1
        struct[0].g[16,0] = V_3*i_d_3*cos(delta_3 - theta_3) - V_3*i_q_3*sin(delta_3 - theta_3) - q_g_3_1
        struct[0].g[17,0] = K_a_3*(-v_c_3 + v_pss_3 + v_ref_3) + K_ai_3*xi_v_3 - v_f_3
        struct[0].g[18,0] = p_c_3 - p_m_ref_3 + p_r_3 + xi_imw_3 - (omega_3 - omega_ref_3)/Droop_3
        struct[0].g[19,0] = T_gov_2_3*(x_gov_1_3 - x_gov_2_3)/T_gov_3_3 - p_m_3 + x_gov_2_3
        # g[20..21]: secondary-control power shares (equal split factor 1/2).
        struct[0].g[20,0] = K_sec_2*xi_freq/2 - p_r_2
        struct[0].g[21,0] = K_sec_3*xi_freq/2 - p_r_3
        # g[22..29]: VSM unit - virtual-impedance current references, P/Q
        # injections, D2 damping power, reactive-voltage controller, power
        # balance (storage + source) and mechanical-power reference. The
        # Piecewise zeroes the SOC PI output when SOC or storage power is
        # outside its limits (an anti-windup clamp encoded as logic on the
        # sign/limit conditions).
        struct[0].g[22,0] = R_v_1*i_q_ref_1 + V_1*cos(delta_1 - theta_1) + X_v_1*i_d_ref_1 - e_v_1
        struct[0].g[23,0] = R_v_1*i_d_ref_1 + V_1*sin(delta_1 - theta_1) - X_v_1*i_q_ref_1
        struct[0].g[24,0] = V_1*i_d_1*sin(delta_1 - theta_1) + V_1*i_q_1*cos(delta_1 - theta_1) - p_g_1_1
        struct[0].g[25,0] = V_1*i_d_1*cos(delta_1 - theta_1) - V_1*i_q_1*sin(delta_1 - theta_1) - q_g_1_1
        struct[0].g[26,0] = D2_1*(omega_v_1 - x_wo_1 - 1.0) - p_d2_1
        struct[0].g[27,0] = K_q_1*(-q_g_1_1 + q_ref_1 + xi_q_1/T_q_1) - e_v_1
        struct[0].g[28,0] = -i_d_1*(R_s_1*i_d_1 + V_1*sin(delta_1 - theta_1)) - i_q_1*(R_s_1*i_q_1 + V_1*cos(delta_1 - theta_1)) + p_src_1 + p_sto_1
        struct[0].g[29,0] = Dp_ref_1 - p_m_1 + p_src_1 + Piecewise(np.array([(0.0, ((p_sto_1 > 0.0) | (soc_1 > 1.0)) & ((p_sto_1 > 0.0) | (p_sto_1 < 0.0)) & ((soc_1 > 1.0) | (soc_1 < 0.0)) & ((p_sto_1 < 0.0) | (soc_1 < 0.0))), (K_i_soc_1*xi_soc_1 + K_p_soc_1*(-soc_1 + soc_ref_1), True)]))
        # g[30]: center-of-inertia frequency (equal-weight average of the
        # two synchronous machines; the VSM does not contribute here).
        struct[0].g[30,0] = omega_2/2 + omega_3/2 - omega_coi
    # Outputs:
    if mode == 3:
        # h[0..2]: bus voltage magnitudes; h[3..5]: electrical powers of the
        # three units; h[6]: SOC PI controller output (same clamp as g[29]).
        struct[0].h[0,0] = V_1
        struct[0].h[1,0] = V_2
        struct[0].h[2,0] = V_3
        struct[0].h[3,0] = i_d_2*(R_a_2*i_d_2 + V_2*sin(delta_2 - theta_2)) + i_q_2*(R_a_2*i_q_2 + V_2*cos(delta_2 - theta_2))
        struct[0].h[4,0] = i_d_3*(R_a_3*i_d_3 + V_3*sin(delta_3 - theta_3)) + i_q_3*(R_a_3*i_q_3 + V_3*cos(delta_3 - theta_3))
        struct[0].h[5,0] = i_d_1*(R_s_1*i_d_1 + V_1*sin(delta_1 - theta_1)) + i_q_1*(R_s_1*i_q_1 + V_1*cos(delta_1 - theta_1))
        struct[0].h[6,0] = Piecewise(np.array([(0.0, ((p_sto_1 > 0.0) | (soc_1 > 1.0)) & ((p_sto_1 > 0.0) | (p_sto_1 < 0.0)) & ((soc_1 > 1.0) | (soc_1 < 0.0)) & ((p_sto_1 < 0.0) | (soc_1 < 0.0))), (K_i_soc_1*xi_soc_1 + K_p_soc_1*(-soc_1 + soc_ref_1), True)]))
    if mode == 10:
        # Fx_ini = df/dx. Unlike the runtime Fx, this includes the small
        # constant diagonals (-1e-6 leaks of xi_imw) and Fx_ini[26,25] so
        # the initialization Jacobian is complete.
        struct[0].Fx_ini[0,0] = -K_delta_2
        struct[0].Fx_ini[0,1] = Omega_b_2
        struct[0].Fx_ini[1,0] = (-V_2*i_d_2*cos(delta_2 - theta_2) + V_2*i_q_2*sin(delta_2 - theta_2))/(2*H_2)
        struct[0].Fx_ini[1,1] = -D_2/(2*H_2)
        struct[0].Fx_ini[2,2] = -1/T1d0_2
        struct[0].Fx_ini[3,3] = -1/T1q0_2
        struct[0].Fx_ini[4,4] = -1/T_r_2
        struct[0].Fx_ini[6,6] = -1/T_gov_1_2
        struct[0].Fx_ini[7,6] = 1/T_gov_3_2
        struct[0].Fx_ini[7,7] = -1/T_gov_3_2
        struct[0].Fx_ini[8,8] = -0.00000100000000000000
        struct[0].Fx_ini[9,9] = -K_delta_3
        struct[0].Fx_ini[9,10] = Omega_b_3
        struct[0].Fx_ini[10,9] = (-V_3*i_d_3*cos(delta_3 - theta_3) + V_3*i_q_3*sin(delta_3 - theta_3))/(2*H_3)
        struct[0].Fx_ini[10,10] = -D_3/(2*H_3)
        struct[0].Fx_ini[11,11] = -1/T1d0_3
        struct[0].Fx_ini[12,12] = -1/T1q0_3
        struct[0].Fx_ini[13,13] = -1/T_r_3
        struct[0].Fx_ini[15,15] = -1/T_gov_1_3
        struct[0].Fx_ini[16,15] = 1/T_gov_3_3
        struct[0].Fx_ini[16,16] = -1/T_gov_3_3
        struct[0].Fx_ini[17,17] = -0.00000100000000000000
        struct[0].Fx_ini[19,19] = -K_delta_1
        struct[0].Fx_ini[19,20] = Omega_b_1
        struct[0].Fx_ini[20,20] = -D1_1/(2*H_1)
        struct[0].Fx_ini[21,20] = 1/T_wo_1
        struct[0].Fx_ini[21,21] = -1/T_wo_1
        struct[0].Fx_ini[22,22] = -1/T_i_1
        struct[0].Fx_ini[23,23] = -1/T_i_1
        struct[0].Fx_ini[26,25] = -1
    if mode == 11:
        # Fy_ini = df/dy (sparse entries only).
        struct[0].Fy_ini[0,30] = -Omega_b_2
        struct[0].Fy_ini[1,2] = (-i_d_2*sin(delta_2 - theta_2) - i_q_2*cos(delta_2 - theta_2))/(2*H_2)
        struct[0].Fy_ini[1,3] = (V_2*i_d_2*cos(delta_2 - theta_2) - V_2*i_q_2*sin(delta_2 - theta_2))/(2*H_2)
        struct[0].Fy_ini[1,6] = (-2*R_a_2*i_d_2 - V_2*sin(delta_2 - theta_2))/(2*H_2)
        struct[0].Fy_ini[1,7] = (-2*R_a_2*i_q_2 - V_2*cos(delta_2 - theta_2))/(2*H_2)
        struct[0].Fy_ini[1,12] = 1/(2*H_2)
        struct[0].Fy_ini[1,30] = D_2/(2*H_2)
        struct[0].Fy_ini[2,6] = (X1d_2 - X_d_2)/T1d0_2
        struct[0].Fy_ini[2,10] = 1/T1d0_2
        struct[0].Fy_ini[3,7] = (-X1q_2 + X_q_2)/T1q0_2
        struct[0].Fy_ini[4,2] = 1/T_r_2
        struct[0].Fy_ini[5,2] = -1
        struct[0].Fy_ini[6,11] = 1/T_gov_1_2
        struct[0].Fy_ini[8,8] = -K_imw_2
        struct[0].Fy_ini[9,30] = -Omega_b_3
        struct[0].Fy_ini[10,4] = (-i_d_3*sin(delta_3 - theta_3) - i_q_3*cos(delta_3 - theta_3))/(2*H_3)
        struct[0].Fy_ini[10,5] = (V_3*i_d_3*cos(delta_3 - theta_3) - V_3*i_q_3*sin(delta_3 - theta_3))/(2*H_3)
        struct[0].Fy_ini[10,13] = (-2*R_a_3*i_d_3 - V_3*sin(delta_3 - theta_3))/(2*H_3)
        struct[0].Fy_ini[10,14] = (-2*R_a_3*i_q_3 - V_3*cos(delta_3 - theta_3))/(2*H_3)
        struct[0].Fy_ini[10,19] = 1/(2*H_3)
        struct[0].Fy_ini[10,30] = D_3/(2*H_3)
        struct[0].Fy_ini[11,13] = (X1d_3 - X_d_3)/T1d0_3
        struct[0].Fy_ini[11,17] = 1/T1d0_3
        struct[0].Fy_ini[12,14] = (-X1q_3 + X_q_3)/T1q0_3
        struct[0].Fy_ini[13,4] = 1/T_r_3
        struct[0].Fy_ini[14,4] = -1
        struct[0].Fy_ini[15,18] = 1/T_gov_1_3
        struct[0].Fy_ini[17,15] = -K_imw_3
        struct[0].Fy_ini[18,30] = -1
        struct[0].Fy_ini[19,24] = -D3_1
        struct[0].Fy_ini[19,29] = D3_1
        struct[0].Fy_ini[19,30] = -Omega_b_1
        struct[0].Fy_ini[20,24] = -1/(2*H_1)
        struct[0].Fy_ini[20,26] = -1/(2*H_1)
        struct[0].Fy_ini[20,29] = 1/(2*H_1)
        struct[0].Fy_ini[22,22] = 1/T_i_1
        struct[0].Fy_ini[23,23] = 1/T_i_1
        struct[0].Fy_ini[24,25] = -1
        struct[0].Fy_ini[25,28] = -1/H_s_1
        # Gy_ini = dg/dy, including the constant -1/+1 diagonal entries that
        # the runtime Gy omits (handled via its sparse structure elsewhere).
        struct[0].Gy_ini[0,0] = 2*V_1*g_1_2 + V_2*(-b_1_2*sin(theta_1 - theta_2) - g_1_2*cos(theta_1 - theta_2))
        struct[0].Gy_ini[0,1] = V_1*V_2*(-b_1_2*cos(theta_1 - theta_2) + g_1_2*sin(theta_1 - theta_2))
        struct[0].Gy_ini[0,2] = V_1*(-b_1_2*sin(theta_1 - theta_2) - g_1_2*cos(theta_1 - theta_2))
        struct[0].Gy_ini[0,3] = V_1*V_2*(b_1_2*cos(theta_1 - theta_2) - g_1_2*sin(theta_1 - theta_2))
        struct[0].Gy_ini[0,24] = -S_n_1/S_base
        struct[0].Gy_ini[1,0] = 2*V_1*(-b_1_2 - bs_1_2/2) + V_2*(b_1_2*cos(theta_1 - theta_2) - g_1_2*sin(theta_1 - theta_2))
        struct[0].Gy_ini[1,1] = V_1*V_2*(-b_1_2*sin(theta_1 - theta_2) - g_1_2*cos(theta_1 - theta_2))
        struct[0].Gy_ini[1,2] = V_1*(b_1_2*cos(theta_1 - theta_2) - g_1_2*sin(theta_1 - theta_2))
        struct[0].Gy_ini[1,3] = V_1*V_2*(b_1_2*sin(theta_1 - theta_2) + g_1_2*cos(theta_1 - theta_2))
        struct[0].Gy_ini[1,25] = -S_n_1/S_base
        struct[0].Gy_ini[2,0] = V_2*(b_1_2*sin(theta_1 - theta_2) - g_1_2*cos(theta_1 - theta_2))
        struct[0].Gy_ini[2,1] = V_1*V_2*(b_1_2*cos(theta_1 - theta_2) + g_1_2*sin(theta_1 - theta_2))
        struct[0].Gy_ini[2,2] = V_1*(b_1_2*sin(theta_1 - theta_2) - g_1_2*cos(theta_1 - theta_2)) + 2*V_2*(g_1_2 + g_2_3) + V_3*(-b_2_3*sin(theta_2 - theta_3) - g_2_3*cos(theta_2 - theta_3))
        struct[0].Gy_ini[2,3] = V_1*V_2*(-b_1_2*cos(theta_1 - theta_2) - g_1_2*sin(theta_1 - theta_2)) + V_2*V_3*(-b_2_3*cos(theta_2 - theta_3) + g_2_3*sin(theta_2 - theta_3))
        struct[0].Gy_ini[2,4] = V_2*(-b_2_3*sin(theta_2 - theta_3) - g_2_3*cos(theta_2 - theta_3))
        struct[0].Gy_ini[2,5] = V_2*V_3*(b_2_3*cos(theta_2 - theta_3) - g_2_3*sin(theta_2 - theta_3))
        struct[0].Gy_ini[2,8] = -S_n_2/S_base
        struct[0].Gy_ini[3,0] = V_2*(b_1_2*cos(theta_1 - theta_2) + g_1_2*sin(theta_1 - theta_2))
        struct[0].Gy_ini[3,1] = V_1*V_2*(-b_1_2*sin(theta_1 - theta_2) + g_1_2*cos(theta_1 - theta_2))
        struct[0].Gy_ini[3,2] = V_1*(b_1_2*cos(theta_1 - theta_2) + g_1_2*sin(theta_1 - theta_2)) + 2*V_2*(-b_1_2 - b_2_3 - bs_1_2/2 - bs_2_3/2) + V_3*(b_2_3*cos(theta_2 - theta_3) - g_2_3*sin(theta_2 - theta_3))
        struct[0].Gy_ini[3,3] = V_1*V_2*(b_1_2*sin(theta_1 - theta_2) - g_1_2*cos(theta_1 - theta_2)) + V_2*V_3*(-b_2_3*sin(theta_2 - theta_3) - g_2_3*cos(theta_2 - theta_3))
        struct[0].Gy_ini[3,4] = V_2*(b_2_3*cos(theta_2 - theta_3) - g_2_3*sin(theta_2 - theta_3))
        struct[0].Gy_ini[3,5] = V_2*V_3*(b_2_3*sin(theta_2 - theta_3) + g_2_3*cos(theta_2 - theta_3))
        struct[0].Gy_ini[3,9] = -S_n_2/S_base
        struct[0].Gy_ini[4,2] = V_3*(b_2_3*sin(theta_2 - theta_3) - g_2_3*cos(theta_2 - theta_3))
        struct[0].Gy_ini[4,3] = V_2*V_3*(b_2_3*cos(theta_2 - theta_3) + g_2_3*sin(theta_2 - theta_3))
        struct[0].Gy_ini[4,4] = V_2*(b_2_3*sin(theta_2 - theta_3) - g_2_3*cos(theta_2 - theta_3)) + 2*V_3*g_2_3
        struct[0].Gy_ini[4,5] = V_2*V_3*(-b_2_3*cos(theta_2 - theta_3) - g_2_3*sin(theta_2 - theta_3))
        struct[0].Gy_ini[4,15] = -S_n_3/S_base
        struct[0].Gy_ini[5,2] = V_3*(b_2_3*cos(theta_2 - theta_3) + g_2_3*sin(theta_2 - theta_3))
        struct[0].Gy_ini[5,3] = V_2*V_3*(-b_2_3*sin(theta_2 - theta_3) + g_2_3*cos(theta_2 - theta_3))
        struct[0].Gy_ini[5,4] = V_2*(b_2_3*cos(theta_2 - theta_3) + g_2_3*sin(theta_2 - theta_3)) + 2*V_3*(-b_2_3 - bs_2_3/2)
        struct[0].Gy_ini[5,5] = V_2*V_3*(b_2_3*sin(theta_2 - theta_3) - g_2_3*cos(theta_2 - theta_3))
        struct[0].Gy_ini[5,16] = -S_n_3/S_base
        struct[0].Gy_ini[6,2] = cos(delta_2 - theta_2)
        struct[0].Gy_ini[6,3] = V_2*sin(delta_2 - theta_2)
        struct[0].Gy_ini[6,6] = X1d_2
        struct[0].Gy_ini[6,7] = R_a_2
        struct[0].Gy_ini[7,2] = sin(delta_2 - theta_2)
        struct[0].Gy_ini[7,3] = -V_2*cos(delta_2 - theta_2)
        struct[0].Gy_ini[7,6] = R_a_2
        struct[0].Gy_ini[7,7] = -X1q_2
        struct[0].Gy_ini[8,2] = i_d_2*sin(delta_2 - theta_2) + i_q_2*cos(delta_2 - theta_2)
        struct[0].Gy_ini[8,3] = -V_2*i_d_2*cos(delta_2 - theta_2) + V_2*i_q_2*sin(delta_2 - theta_2)
        struct[0].Gy_ini[8,6] = V_2*sin(delta_2 - theta_2)
        struct[0].Gy_ini[8,7] = V_2*cos(delta_2 - theta_2)
        struct[0].Gy_ini[8,8] = -1
        struct[0].Gy_ini[9,2] = i_d_2*cos(delta_2 - theta_2) - i_q_2*sin(delta_2 - theta_2)
        struct[0].Gy_ini[9,3] = V_2*i_d_2*sin(delta_2 - theta_2) + V_2*i_q_2*cos(delta_2 - theta_2)
        struct[0].Gy_ini[9,6] = V_2*cos(delta_2 - theta_2)
        struct[0].Gy_ini[9,7] = -V_2*sin(delta_2 - theta_2)
        struct[0].Gy_ini[9,9] = -1
        struct[0].Gy_ini[10,10] = -1
        struct[0].Gy_ini[11,11] = -1
        struct[0].Gy_ini[11,20] = 1
        struct[0].Gy_ini[12,12] = -1
        struct[0].Gy_ini[13,4] = cos(delta_3 - theta_3)
        struct[0].Gy_ini[13,5] = V_3*sin(delta_3 - theta_3)
        struct[0].Gy_ini[13,13] = X1d_3
        struct[0].Gy_ini[13,14] = R_a_3
        struct[0].Gy_ini[14,4] = sin(delta_3 - theta_3)
        struct[0].Gy_ini[14,5] = -V_3*cos(delta_3 - theta_3)
        struct[0].Gy_ini[14,13] = R_a_3
        struct[0].Gy_ini[14,14] = -X1q_3
        struct[0].Gy_ini[15,4] = i_d_3*sin(delta_3 - theta_3) + i_q_3*cos(delta_3 - theta_3)
        struct[0].Gy_ini[15,5] = -V_3*i_d_3*cos(delta_3 - theta_3) + V_3*i_q_3*sin(delta_3 - theta_3)
        struct[0].Gy_ini[15,13] = V_3*sin(delta_3 - theta_3)
        struct[0].Gy_ini[15,14] = V_3*cos(delta_3 - theta_3)
        struct[0].Gy_ini[15,15] = -1
        struct[0].Gy_ini[16,4] = i_d_3*cos(delta_3 - theta_3) - i_q_3*sin(delta_3 - theta_3)
        struct[0].Gy_ini[16,5] = V_3*i_d_3*sin(delta_3 - theta_3) + V_3*i_q_3*cos(delta_3 - theta_3)
        struct[0].Gy_ini[16,13] = V_3*cos(delta_3 - theta_3)
        struct[0].Gy_ini[16,14] = -V_3*sin(delta_3 - theta_3)
        struct[0].Gy_ini[16,16] = -1
        struct[0].Gy_ini[17,17] = -1
        struct[0].Gy_ini[18,18] = -1
        struct[0].Gy_ini[18,21] = 1
        struct[0].Gy_ini[19,19] = -1
        struct[0].Gy_ini[20,20] = -1
        struct[0].Gy_ini[21,21] = -1
        struct[0].Gy_ini[22,0] = cos(delta_1 - theta_1)
        struct[0].Gy_ini[22,1] = V_1*sin(delta_1 - theta_1)
        struct[0].Gy_ini[22,22] = X_v_1
        struct[0].Gy_ini[22,23] = R_v_1
        struct[0].Gy_ini[22,27] = -1
        struct[0].Gy_ini[23,0] = sin(delta_1 - theta_1)
        struct[0].Gy_ini[23,1] = -V_1*cos(delta_1 - theta_1)
        struct[0].Gy_ini[23,22] = R_v_1
        struct[0].Gy_ini[23,23] = -X_v_1
        struct[0].Gy_ini[24,0] = i_d_1*sin(delta_1 - theta_1) + i_q_1*cos(delta_1 - theta_1)
        struct[0].Gy_ini[24,1] = -V_1*i_d_1*cos(delta_1 - theta_1) + V_1*i_q_1*sin(delta_1 - theta_1)
        struct[0].Gy_ini[24,24] = -1
        struct[0].Gy_ini[25,0] = i_d_1*cos(delta_1 - theta_1) - i_q_1*sin(delta_1 - theta_1)
        struct[0].Gy_ini[25,1] = V_1*i_d_1*sin(delta_1 - theta_1) + V_1*i_q_1*cos(delta_1 - theta_1)
        struct[0].Gy_ini[25,25] = -1
        struct[0].Gy_ini[26,26] = -1
        struct[0].Gy_ini[27,25] = -K_q_1
        struct[0].Gy_ini[27,27] = -1
        struct[0].Gy_ini[28,0] = -i_d_1*sin(delta_1 - theta_1) - i_q_1*cos(delta_1 - theta_1)
        struct[0].Gy_ini[28,1] = V_1*i_d_1*cos(delta_1 - theta_1) - V_1*i_q_1*sin(delta_1 - theta_1)
        struct[0].Gy_ini[28,28] = 1
        struct[0].Gy_ini[29,29] = -1
        struct[0].Gy_ini[30,30] = -1
def run_nn(t,struct,mode):
    """Evaluate the DAE system in run (post-initialization) conditions.

    NOTE(review): this function appears to be machine-generated (pydae-style
    code); prefer editing the generator over hand-editing this body — confirm.

    Parameters
    ----------
    t : float
        Current simulation time (unused in the algebraic expressions here).
    struct : record array
        Model container; parameters, inputs, states and work matrices are
        read from and written into ``struct[0]``.
    mode : int
        Selects what is evaluated:
        2  -> differential residuals ``f``
        3  -> algebraic residuals ``g`` and outputs ``h``
        10 -> Jacobian ``Fx`` (df/dx)
        11 -> Jacobians ``Fy`` (df/dy), ``Gy`` (dg/dy) and ``Gu`` (dg/du)
    """
    # Parameters:
    S_base = struct[0].S_base
    g_1_2 = struct[0].g_1_2
    b_1_2 = struct[0].b_1_2
    bs_1_2 = struct[0].bs_1_2
    g_2_3 = struct[0].g_2_3
    b_2_3 = struct[0].b_2_3
    bs_2_3 = struct[0].bs_2_3
    U_1_n = struct[0].U_1_n
    U_2_n = struct[0].U_2_n
    U_3_n = struct[0].U_3_n
    S_n_2 = struct[0].S_n_2
    H_2 = struct[0].H_2
    Omega_b_2 = struct[0].Omega_b_2
    T1d0_2 = struct[0].T1d0_2
    T1q0_2 = struct[0].T1q0_2
    X_d_2 = struct[0].X_d_2
    X_q_2 = struct[0].X_q_2
    X1d_2 = struct[0].X1d_2
    X1q_2 = struct[0].X1q_2
    D_2 = struct[0].D_2
    R_a_2 = struct[0].R_a_2
    K_delta_2 = struct[0].K_delta_2
    K_a_2 = struct[0].K_a_2
    K_ai_2 = struct[0].K_ai_2
    T_r_2 = struct[0].T_r_2
    Droop_2 = struct[0].Droop_2
    T_gov_1_2 = struct[0].T_gov_1_2
    T_gov_2_2 = struct[0].T_gov_2_2
    T_gov_3_2 = struct[0].T_gov_3_2
    K_imw_2 = struct[0].K_imw_2
    omega_ref_2 = struct[0].omega_ref_2
    S_n_3 = struct[0].S_n_3
    H_3 = struct[0].H_3
    Omega_b_3 = struct[0].Omega_b_3
    T1d0_3 = struct[0].T1d0_3
    T1q0_3 = struct[0].T1q0_3
    X_d_3 = struct[0].X_d_3
    X_q_3 = struct[0].X_q_3
    X1d_3 = struct[0].X1d_3
    X1q_3 = struct[0].X1q_3
    D_3 = struct[0].D_3
    R_a_3 = struct[0].R_a_3
    K_delta_3 = struct[0].K_delta_3
    K_a_3 = struct[0].K_a_3
    K_ai_3 = struct[0].K_ai_3
    T_r_3 = struct[0].T_r_3
    Droop_3 = struct[0].Droop_3
    T_gov_1_3 = struct[0].T_gov_1_3
    T_gov_2_3 = struct[0].T_gov_2_3
    T_gov_3_3 = struct[0].T_gov_3_3
    K_imw_3 = struct[0].K_imw_3
    omega_ref_3 = struct[0].omega_ref_3
    K_sec_2 = struct[0].K_sec_2
    K_sec_3 = struct[0].K_sec_3
    S_n_1 = struct[0].S_n_1
    R_s_1 = struct[0].R_s_1
    H_1 = struct[0].H_1
    Omega_b_1 = struct[0].Omega_b_1
    R_v_1 = struct[0].R_v_1
    X_v_1 = struct[0].X_v_1
    D1_1 = struct[0].D1_1
    D2_1 = struct[0].D2_1
    D3_1 = struct[0].D3_1
    K_delta_1 = struct[0].K_delta_1
    T_wo_1 = struct[0].T_wo_1
    T_i_1 = struct[0].T_i_1
    K_q_1 = struct[0].K_q_1
    T_q_1 = struct[0].T_q_1
    H_s_1 = struct[0].H_s_1
    K_p_soc_1 = struct[0].K_p_soc_1
    K_i_soc_1 = struct[0].K_i_soc_1
    # Inputs:
    P_1 = struct[0].P_1
    Q_1 = struct[0].Q_1
    P_2 = struct[0].P_2
    Q_2 = struct[0].Q_2
    P_3 = struct[0].P_3
    Q_3 = struct[0].Q_3
    v_ref_2 = struct[0].v_ref_2
    v_pss_2 = struct[0].v_pss_2
    p_c_2 = struct[0].p_c_2
    v_ref_3 = struct[0].v_ref_3
    v_pss_3 = struct[0].v_pss_3
    p_c_3 = struct[0].p_c_3
    p_in_1 = struct[0].p_in_1
    Dp_ref_1 = struct[0].Dp_ref_1
    q_ref_1 = struct[0].q_ref_1
    p_src_1 = struct[0].p_src_1
    soc_ref_1 = struct[0].soc_ref_1
    # Dynamical states:
    delta_2 = struct[0].x[0,0]
    omega_2 = struct[0].x[1,0]
    e1q_2 = struct[0].x[2,0]
    e1d_2 = struct[0].x[3,0]
    v_c_2 = struct[0].x[4,0]
    xi_v_2 = struct[0].x[5,0]
    x_gov_1_2 = struct[0].x[6,0]
    x_gov_2_2 = struct[0].x[7,0]
    xi_imw_2 = struct[0].x[8,0]
    delta_3 = struct[0].x[9,0]
    omega_3 = struct[0].x[10,0]
    e1q_3 = struct[0].x[11,0]
    e1d_3 = struct[0].x[12,0]
    v_c_3 = struct[0].x[13,0]
    xi_v_3 = struct[0].x[14,0]
    x_gov_1_3 = struct[0].x[15,0]
    x_gov_2_3 = struct[0].x[16,0]
    xi_imw_3 = struct[0].x[17,0]
    xi_freq = struct[0].x[18,0]
    delta_1 = struct[0].x[19,0]
    omega_v_1 = struct[0].x[20,0]
    x_wo_1 = struct[0].x[21,0]
    i_d_1 = struct[0].x[22,0]
    i_q_1 = struct[0].x[23,0]
    xi_q_1 = struct[0].x[24,0]
    soc_1 = struct[0].x[25,0]
    xi_soc_1 = struct[0].x[26,0]
    # Algebraic states:
    V_1 = struct[0].y_run[0,0]
    theta_1 = struct[0].y_run[1,0]
    V_2 = struct[0].y_run[2,0]
    theta_2 = struct[0].y_run[3,0]
    V_3 = struct[0].y_run[4,0]
    theta_3 = struct[0].y_run[5,0]
    i_d_2 = struct[0].y_run[6,0]
    i_q_2 = struct[0].y_run[7,0]
    p_g_2_1 = struct[0].y_run[8,0]
    q_g_2_1 = struct[0].y_run[9,0]
    v_f_2 = struct[0].y_run[10,0]
    p_m_ref_2 = struct[0].y_run[11,0]
    p_m_2 = struct[0].y_run[12,0]
    i_d_3 = struct[0].y_run[13,0]
    i_q_3 = struct[0].y_run[14,0]
    p_g_3_1 = struct[0].y_run[15,0]
    q_g_3_1 = struct[0].y_run[16,0]
    v_f_3 = struct[0].y_run[17,0]
    p_m_ref_3 = struct[0].y_run[18,0]
    p_m_3 = struct[0].y_run[19,0]
    p_r_2 = struct[0].y_run[20,0]
    p_r_3 = struct[0].y_run[21,0]
    i_d_ref_1 = struct[0].y_run[22,0]
    i_q_ref_1 = struct[0].y_run[23,0]
    p_g_1_1 = struct[0].y_run[24,0]
    q_g_1_1 = struct[0].y_run[25,0]
    p_d2_1 = struct[0].y_run[26,0]
    e_v_1 = struct[0].y_run[27,0]
    p_sto_1 = struct[0].y_run[28,0]
    p_m_1 = struct[0].y_run[29,0]
    omega_coi = struct[0].y_run[30,0]
    # Differential equations:
    if mode == 2:
        struct[0].f[0,0] = -K_delta_2*delta_2 + Omega_b_2*(omega_2 - omega_coi)
        struct[0].f[1,0] = (-D_2*(omega_2 - omega_coi) - i_d_2*(R_a_2*i_d_2 + V_2*sin(delta_2 - theta_2)) - i_q_2*(R_a_2*i_q_2 + V_2*cos(delta_2 - theta_2)) + p_m_2)/(2*H_2)
        struct[0].f[2,0] = (-e1q_2 - i_d_2*(-X1d_2 + X_d_2) + v_f_2)/T1d0_2
        struct[0].f[3,0] = (-e1d_2 + i_q_2*(-X1q_2 + X_q_2))/T1q0_2
        struct[0].f[4,0] = (V_2 - v_c_2)/T_r_2
        struct[0].f[5,0] = -V_2 + v_ref_2
        struct[0].f[6,0] = (p_m_ref_2 - x_gov_1_2)/T_gov_1_2
        struct[0].f[7,0] = (x_gov_1_2 - x_gov_2_2)/T_gov_3_2
        struct[0].f[8,0] = K_imw_2*(p_c_2 - p_g_2_1) - 1.0e-6*xi_imw_2
        struct[0].f[9,0] = -K_delta_3*delta_3 + Omega_b_3*(omega_3 - omega_coi)
        struct[0].f[10,0] = (-D_3*(omega_3 - omega_coi) - i_d_3*(R_a_3*i_d_3 + V_3*sin(delta_3 - theta_3)) - i_q_3*(R_a_3*i_q_3 + V_3*cos(delta_3 - theta_3)) + p_m_3)/(2*H_3)
        struct[0].f[11,0] = (-e1q_3 - i_d_3*(-X1d_3 + X_d_3) + v_f_3)/T1d0_3
        struct[0].f[12,0] = (-e1d_3 + i_q_3*(-X1q_3 + X_q_3))/T1q0_3
        struct[0].f[13,0] = (V_3 - v_c_3)/T_r_3
        struct[0].f[14,0] = -V_3 + v_ref_3
        struct[0].f[15,0] = (p_m_ref_3 - x_gov_1_3)/T_gov_1_3
        struct[0].f[16,0] = (x_gov_1_3 - x_gov_2_3)/T_gov_3_3
        struct[0].f[17,0] = K_imw_3*(p_c_3 - p_g_3_1) - 1.0e-6*xi_imw_3
        struct[0].f[18,0] = 1 - omega_coi
        struct[0].f[19,0] = D3_1*(-p_g_1_1 + p_m_1) - K_delta_1*delta_1 + Omega_b_1*(-omega_coi + omega_v_1)
        struct[0].f[20,0] = (-D1_1*(omega_v_1 - 1) - p_d2_1 - p_g_1_1 + p_m_1)/(2*H_1)
        struct[0].f[21,0] = (omega_v_1 - x_wo_1 - 1.0)/T_wo_1
        struct[0].f[22,0] = (-i_d_1 + i_d_ref_1)/T_i_1
        struct[0].f[23,0] = (-i_q_1 + i_q_ref_1)/T_i_1
        struct[0].f[24,0] = -q_g_1_1 + q_ref_1
        struct[0].f[25,0] = -p_sto_1/H_s_1
        struct[0].f[26,0] = -soc_1 + soc_ref_1
    # Algebraic equations:
    if mode == 3:
        struct[0].g[0,0] = -P_1/S_base + V_1**2*g_1_2 + V_1*V_2*(-b_1_2*sin(theta_1 - theta_2) - g_1_2*cos(theta_1 - theta_2)) - S_n_1*p_g_1_1/S_base
        struct[0].g[1,0] = -Q_1/S_base + V_1**2*(-b_1_2 - bs_1_2/2) + V_1*V_2*(b_1_2*cos(theta_1 - theta_2) - g_1_2*sin(theta_1 - theta_2)) - S_n_1*q_g_1_1/S_base
        struct[0].g[2,0] = -P_2/S_base + V_1*V_2*(b_1_2*sin(theta_1 - theta_2) - g_1_2*cos(theta_1 - theta_2)) + V_2**2*(g_1_2 + g_2_3) + V_2*V_3*(-b_2_3*sin(theta_2 - theta_3) - g_2_3*cos(theta_2 - theta_3)) - S_n_2*p_g_2_1/S_base
        struct[0].g[3,0] = -Q_2/S_base + V_1*V_2*(b_1_2*cos(theta_1 - theta_2) + g_1_2*sin(theta_1 - theta_2)) + V_2**2*(-b_1_2 - b_2_3 - bs_1_2/2 - bs_2_3/2) + V_2*V_3*(b_2_3*cos(theta_2 - theta_3) - g_2_3*sin(theta_2 - theta_3)) - S_n_2*q_g_2_1/S_base
        struct[0].g[4,0] = -P_3/S_base + V_2*V_3*(b_2_3*sin(theta_2 - theta_3) - g_2_3*cos(theta_2 - theta_3)) + V_3**2*g_2_3 - S_n_3*p_g_3_1/S_base
        struct[0].g[5,0] = -Q_3/S_base + V_2*V_3*(b_2_3*cos(theta_2 - theta_3) + g_2_3*sin(theta_2 - theta_3)) + V_3**2*(-b_2_3 - bs_2_3/2) - S_n_3*q_g_3_1/S_base
        struct[0].g[6,0] = R_a_2*i_q_2 + V_2*cos(delta_2 - theta_2) + X1d_2*i_d_2 - e1q_2
        struct[0].g[7,0] = R_a_2*i_d_2 + V_2*sin(delta_2 - theta_2) - X1q_2*i_q_2 - e1d_2
        struct[0].g[8,0] = V_2*i_d_2*sin(delta_2 - theta_2) + V_2*i_q_2*cos(delta_2 - theta_2) - p_g_2_1
        struct[0].g[9,0] = V_2*i_d_2*cos(delta_2 - theta_2) - V_2*i_q_2*sin(delta_2 - theta_2) - q_g_2_1
        struct[0].g[10,0] = K_a_2*(-v_c_2 + v_pss_2 + v_ref_2) + K_ai_2*xi_v_2 - v_f_2
        struct[0].g[11,0] = p_c_2 - p_m_ref_2 + p_r_2 + xi_imw_2 - (omega_2 - omega_ref_2)/Droop_2
        struct[0].g[12,0] = T_gov_2_2*(x_gov_1_2 - x_gov_2_2)/T_gov_3_2 - p_m_2 + x_gov_2_2
        struct[0].g[13,0] = R_a_3*i_q_3 + V_3*cos(delta_3 - theta_3) + X1d_3*i_d_3 - e1q_3
        struct[0].g[14,0] = R_a_3*i_d_3 + V_3*sin(delta_3 - theta_3) - X1q_3*i_q_3 - e1d_3
        struct[0].g[15,0] = V_3*i_d_3*sin(delta_3 - theta_3) + V_3*i_q_3*cos(delta_3 - theta_3) - p_g_3_1
        struct[0].g[16,0] = V_3*i_d_3*cos(delta_3 - theta_3) - V_3*i_q_3*sin(delta_3 - theta_3) - q_g_3_1
        struct[0].g[17,0] = K_a_3*(-v_c_3 + v_pss_3 + v_ref_3) + K_ai_3*xi_v_3 - v_f_3
        struct[0].g[18,0] = p_c_3 - p_m_ref_3 + p_r_3 + xi_imw_3 - (omega_3 - omega_ref_3)/Droop_3
        struct[0].g[19,0] = T_gov_2_3*(x_gov_1_3 - x_gov_2_3)/T_gov_3_3 - p_m_3 + x_gov_2_3
        struct[0].g[20,0] = K_sec_2*xi_freq/2 - p_r_2
        struct[0].g[21,0] = K_sec_3*xi_freq/2 - p_r_3
        struct[0].g[22,0] = R_v_1*i_q_ref_1 + V_1*cos(delta_1 - theta_1) + X_v_1*i_d_ref_1 - e_v_1
        struct[0].g[23,0] = R_v_1*i_d_ref_1 + V_1*sin(delta_1 - theta_1) - X_v_1*i_q_ref_1
        struct[0].g[24,0] = V_1*i_d_1*sin(delta_1 - theta_1) + V_1*i_q_1*cos(delta_1 - theta_1) - p_g_1_1
        struct[0].g[25,0] = V_1*i_d_1*cos(delta_1 - theta_1) - V_1*i_q_1*sin(delta_1 - theta_1) - q_g_1_1
        struct[0].g[26,0] = D2_1*(omega_v_1 - x_wo_1 - 1.0) - p_d2_1
        struct[0].g[27,0] = K_q_1*(-q_g_1_1 + q_ref_1 + xi_q_1/T_q_1) - e_v_1
        struct[0].g[28,0] = -i_d_1*(R_s_1*i_d_1 + V_1*sin(delta_1 - theta_1)) - i_q_1*(R_s_1*i_q_1 + V_1*cos(delta_1 - theta_1)) + p_src_1 + p_sto_1
        struct[0].g[29,0] = Dp_ref_1 - p_m_1 + p_src_1 + Piecewise(np.array([(0.0, ((p_sto_1 > 0.0) | (soc_1 > 1.0)) & ((p_sto_1 > 0.0) | (p_sto_1 < 0.0)) & ((soc_1 > 1.0) | (soc_1 < 0.0)) & ((p_sto_1 < 0.0) | (soc_1 < 0.0))), (K_i_soc_1*xi_soc_1 + K_p_soc_1*(-soc_1 + soc_ref_1), True)]))
        struct[0].g[30,0] = omega_2/2 + omega_3/2 - omega_coi
    # Outputs:
    if mode == 3:
        struct[0].h[0,0] = V_1
        struct[0].h[1,0] = V_2
        struct[0].h[2,0] = V_3
        struct[0].h[3,0] = i_d_2*(R_a_2*i_d_2 + V_2*sin(delta_2 - theta_2)) + i_q_2*(R_a_2*i_q_2 + V_2*cos(delta_2 - theta_2))
        struct[0].h[4,0] = i_d_3*(R_a_3*i_d_3 + V_3*sin(delta_3 - theta_3)) + i_q_3*(R_a_3*i_q_3 + V_3*cos(delta_3 - theta_3))
        struct[0].h[5,0] = i_d_1*(R_s_1*i_d_1 + V_1*sin(delta_1 - theta_1)) + i_q_1*(R_s_1*i_q_1 + V_1*cos(delta_1 - theta_1))
        struct[0].h[6,0] = Piecewise(np.array([(0.0, ((p_sto_1 > 0.0) | (soc_1 > 1.0)) & ((p_sto_1 > 0.0) | (p_sto_1 < 0.0)) & ((soc_1 > 1.0) | (soc_1 < 0.0)) & ((p_sto_1 < 0.0) | (soc_1 < 0.0))), (K_i_soc_1*xi_soc_1 + K_p_soc_1*(-soc_1 + soc_ref_1), True)]))
    # Jacobian df/dx (only nonzero entries are written):
    if mode == 10:
        struct[0].Fx[0,0] = -K_delta_2
        struct[0].Fx[0,1] = Omega_b_2
        struct[0].Fx[1,0] = (-V_2*i_d_2*cos(delta_2 - theta_2) + V_2*i_q_2*sin(delta_2 - theta_2))/(2*H_2)
        struct[0].Fx[1,1] = -D_2/(2*H_2)
        struct[0].Fx[2,2] = -1/T1d0_2
        struct[0].Fx[3,3] = -1/T1q0_2
        struct[0].Fx[4,4] = -1/T_r_2
        struct[0].Fx[6,6] = -1/T_gov_1_2
        struct[0].Fx[7,6] = 1/T_gov_3_2
        struct[0].Fx[7,7] = -1/T_gov_3_2
        struct[0].Fx[8,8] = -0.00000100000000000000
        struct[0].Fx[9,9] = -K_delta_3
        struct[0].Fx[9,10] = Omega_b_3
        struct[0].Fx[10,9] = (-V_3*i_d_3*cos(delta_3 - theta_3) + V_3*i_q_3*sin(delta_3 - theta_3))/(2*H_3)
        struct[0].Fx[10,10] = -D_3/(2*H_3)
        struct[0].Fx[11,11] = -1/T1d0_3
        struct[0].Fx[12,12] = -1/T1q0_3
        struct[0].Fx[13,13] = -1/T_r_3
        struct[0].Fx[15,15] = -1/T_gov_1_3
        struct[0].Fx[16,15] = 1/T_gov_3_3
        struct[0].Fx[16,16] = -1/T_gov_3_3
        struct[0].Fx[17,17] = -0.00000100000000000000
        struct[0].Fx[19,19] = -K_delta_1
        struct[0].Fx[19,20] = Omega_b_1
        struct[0].Fx[20,20] = -D1_1/(2*H_1)
        struct[0].Fx[21,20] = 1/T_wo_1
        struct[0].Fx[21,21] = -1/T_wo_1
        struct[0].Fx[22,22] = -1/T_i_1
        struct[0].Fx[23,23] = -1/T_i_1
        struct[0].Fx[26,25] = -1
    # Jacobians df/dy, dg/dy and dg/du (only nonzero entries are written):
    if mode == 11:
        struct[0].Fy[0,30] = -Omega_b_2
        struct[0].Fy[1,2] = (-i_d_2*sin(delta_2 - theta_2) - i_q_2*cos(delta_2 - theta_2))/(2*H_2)
        struct[0].Fy[1,3] = (V_2*i_d_2*cos(delta_2 - theta_2) - V_2*i_q_2*sin(delta_2 - theta_2))/(2*H_2)
        struct[0].Fy[1,6] = (-2*R_a_2*i_d_2 - V_2*sin(delta_2 - theta_2))/(2*H_2)
        struct[0].Fy[1,7] = (-2*R_a_2*i_q_2 - V_2*cos(delta_2 - theta_2))/(2*H_2)
        struct[0].Fy[1,12] = 1/(2*H_2)
        struct[0].Fy[1,30] = D_2/(2*H_2)
        struct[0].Fy[2,6] = (X1d_2 - X_d_2)/T1d0_2
        struct[0].Fy[2,10] = 1/T1d0_2
        struct[0].Fy[3,7] = (-X1q_2 + X_q_2)/T1q0_2
        struct[0].Fy[4,2] = 1/T_r_2
        struct[0].Fy[5,2] = -1
        struct[0].Fy[6,11] = 1/T_gov_1_2
        struct[0].Fy[8,8] = -K_imw_2
        struct[0].Fy[9,30] = -Omega_b_3
        struct[0].Fy[10,4] = (-i_d_3*sin(delta_3 - theta_3) - i_q_3*cos(delta_3 - theta_3))/(2*H_3)
        struct[0].Fy[10,5] = (V_3*i_d_3*cos(delta_3 - theta_3) - V_3*i_q_3*sin(delta_3 - theta_3))/(2*H_3)
        struct[0].Fy[10,13] = (-2*R_a_3*i_d_3 - V_3*sin(delta_3 - theta_3))/(2*H_3)
        struct[0].Fy[10,14] = (-2*R_a_3*i_q_3 - V_3*cos(delta_3 - theta_3))/(2*H_3)
        struct[0].Fy[10,19] = 1/(2*H_3)
        struct[0].Fy[10,30] = D_3/(2*H_3)
        struct[0].Fy[11,13] = (X1d_3 - X_d_3)/T1d0_3
        struct[0].Fy[11,17] = 1/T1d0_3
        struct[0].Fy[12,14] = (-X1q_3 + X_q_3)/T1q0_3
        struct[0].Fy[13,4] = 1/T_r_3
        struct[0].Fy[14,4] = -1
        struct[0].Fy[15,18] = 1/T_gov_1_3
        struct[0].Fy[17,15] = -K_imw_3
        struct[0].Fy[18,30] = -1
        struct[0].Fy[19,24] = -D3_1
        struct[0].Fy[19,29] = D3_1
        struct[0].Fy[19,30] = -Omega_b_1
        struct[0].Fy[20,24] = -1/(2*H_1)
        struct[0].Fy[20,26] = -1/(2*H_1)
        struct[0].Fy[20,29] = 1/(2*H_1)
        struct[0].Fy[22,22] = 1/T_i_1
        struct[0].Fy[23,23] = 1/T_i_1
        struct[0].Fy[24,25] = -1
        struct[0].Fy[25,28] = -1/H_s_1
        struct[0].Gy[0,0] = 2*V_1*g_1_2 + V_2*(-b_1_2*sin(theta_1 - theta_2) - g_1_2*cos(theta_1 - theta_2))
        struct[0].Gy[0,1] = V_1*V_2*(-b_1_2*cos(theta_1 - theta_2) + g_1_2*sin(theta_1 - theta_2))
        struct[0].Gy[0,2] = V_1*(-b_1_2*sin(theta_1 - theta_2) - g_1_2*cos(theta_1 - theta_2))
        struct[0].Gy[0,3] = V_1*V_2*(b_1_2*cos(theta_1 - theta_2) - g_1_2*sin(theta_1 - theta_2))
        struct[0].Gy[0,24] = -S_n_1/S_base
        struct[0].Gy[1,0] = 2*V_1*(-b_1_2 - bs_1_2/2) + V_2*(b_1_2*cos(theta_1 - theta_2) - g_1_2*sin(theta_1 - theta_2))
        struct[0].Gy[1,1] = V_1*V_2*(-b_1_2*sin(theta_1 - theta_2) - g_1_2*cos(theta_1 - theta_2))
        struct[0].Gy[1,2] = V_1*(b_1_2*cos(theta_1 - theta_2) - g_1_2*sin(theta_1 - theta_2))
        struct[0].Gy[1,3] = V_1*V_2*(b_1_2*sin(theta_1 - theta_2) + g_1_2*cos(theta_1 - theta_2))
        struct[0].Gy[1,25] = -S_n_1/S_base
        struct[0].Gy[2,0] = V_2*(b_1_2*sin(theta_1 - theta_2) - g_1_2*cos(theta_1 - theta_2))
        struct[0].Gy[2,1] = V_1*V_2*(b_1_2*cos(theta_1 - theta_2) + g_1_2*sin(theta_1 - theta_2))
        struct[0].Gy[2,2] = V_1*(b_1_2*sin(theta_1 - theta_2) - g_1_2*cos(theta_1 - theta_2)) + 2*V_2*(g_1_2 + g_2_3) + V_3*(-b_2_3*sin(theta_2 - theta_3) - g_2_3*cos(theta_2 - theta_3))
        struct[0].Gy[2,3] = V_1*V_2*(-b_1_2*cos(theta_1 - theta_2) - g_1_2*sin(theta_1 - theta_2)) + V_2*V_3*(-b_2_3*cos(theta_2 - theta_3) + g_2_3*sin(theta_2 - theta_3))
        struct[0].Gy[2,4] = V_2*(-b_2_3*sin(theta_2 - theta_3) - g_2_3*cos(theta_2 - theta_3))
        struct[0].Gy[2,5] = V_2*V_3*(b_2_3*cos(theta_2 - theta_3) - g_2_3*sin(theta_2 - theta_3))
        struct[0].Gy[2,8] = -S_n_2/S_base
        struct[0].Gy[3,0] = V_2*(b_1_2*cos(theta_1 - theta_2) + g_1_2*sin(theta_1 - theta_2))
        struct[0].Gy[3,1] = V_1*V_2*(-b_1_2*sin(theta_1 - theta_2) + g_1_2*cos(theta_1 - theta_2))
        struct[0].Gy[3,2] = V_1*(b_1_2*cos(theta_1 - theta_2) + g_1_2*sin(theta_1 - theta_2)) + 2*V_2*(-b_1_2 - b_2_3 - bs_1_2/2 - bs_2_3/2) + V_3*(b_2_3*cos(theta_2 - theta_3) - g_2_3*sin(theta_2 - theta_3))
        struct[0].Gy[3,3] = V_1*V_2*(b_1_2*sin(theta_1 - theta_2) - g_1_2*cos(theta_1 - theta_2)) + V_2*V_3*(-b_2_3*sin(theta_2 - theta_3) - g_2_3*cos(theta_2 - theta_3))
        struct[0].Gy[3,4] = V_2*(b_2_3*cos(theta_2 - theta_3) - g_2_3*sin(theta_2 - theta_3))
        struct[0].Gy[3,5] = V_2*V_3*(b_2_3*sin(theta_2 - theta_3) + g_2_3*cos(theta_2 - theta_3))
        struct[0].Gy[3,9] = -S_n_2/S_base
        struct[0].Gy[4,2] = V_3*(b_2_3*sin(theta_2 - theta_3) - g_2_3*cos(theta_2 - theta_3))
        struct[0].Gy[4,3] = V_2*V_3*(b_2_3*cos(theta_2 - theta_3) + g_2_3*sin(theta_2 - theta_3))
        struct[0].Gy[4,4] = V_2*(b_2_3*sin(theta_2 - theta_3) - g_2_3*cos(theta_2 - theta_3)) + 2*V_3*g_2_3
        struct[0].Gy[4,5] = V_2*V_3*(-b_2_3*cos(theta_2 - theta_3) - g_2_3*sin(theta_2 - theta_3))
        struct[0].Gy[4,15] = -S_n_3/S_base
        struct[0].Gy[5,2] = V_3*(b_2_3*cos(theta_2 - theta_3) + g_2_3*sin(theta_2 - theta_3))
        struct[0].Gy[5,3] = V_2*V_3*(-b_2_3*sin(theta_2 - theta_3) + g_2_3*cos(theta_2 - theta_3))
        struct[0].Gy[5,4] = V_2*(b_2_3*cos(theta_2 - theta_3) + g_2_3*sin(theta_2 - theta_3)) + 2*V_3*(-b_2_3 - bs_2_3/2)
        struct[0].Gy[5,5] = V_2*V_3*(b_2_3*sin(theta_2 - theta_3) - g_2_3*cos(theta_2 - theta_3))
        struct[0].Gy[5,16] = -S_n_3/S_base
        struct[0].Gy[6,2] = cos(delta_2 - theta_2)
        struct[0].Gy[6,3] = V_2*sin(delta_2 - theta_2)
        struct[0].Gy[6,6] = X1d_2
        struct[0].Gy[6,7] = R_a_2
        struct[0].Gy[7,2] = sin(delta_2 - theta_2)
        struct[0].Gy[7,3] = -V_2*cos(delta_2 - theta_2)
        struct[0].Gy[7,6] = R_a_2
        struct[0].Gy[7,7] = -X1q_2
        struct[0].Gy[8,2] = i_d_2*sin(delta_2 - theta_2) + i_q_2*cos(delta_2 - theta_2)
        struct[0].Gy[8,3] = -V_2*i_d_2*cos(delta_2 - theta_2) + V_2*i_q_2*sin(delta_2 - theta_2)
        struct[0].Gy[8,6] = V_2*sin(delta_2 - theta_2)
        struct[0].Gy[8,7] = V_2*cos(delta_2 - theta_2)
        struct[0].Gy[8,8] = -1
        struct[0].Gy[9,2] = i_d_2*cos(delta_2 - theta_2) - i_q_2*sin(delta_2 - theta_2)
        struct[0].Gy[9,3] = V_2*i_d_2*sin(delta_2 - theta_2) + V_2*i_q_2*cos(delta_2 - theta_2)
        struct[0].Gy[9,6] = V_2*cos(delta_2 - theta_2)
        struct[0].Gy[9,7] = -V_2*sin(delta_2 - theta_2)
        struct[0].Gy[9,9] = -1
        struct[0].Gy[10,10] = -1
        struct[0].Gy[11,11] = -1
        struct[0].Gy[11,20] = 1
        struct[0].Gy[12,12] = -1
        struct[0].Gy[13,4] = cos(delta_3 - theta_3)
        struct[0].Gy[13,5] = V_3*sin(delta_3 - theta_3)
        struct[0].Gy[13,13] = X1d_3
        struct[0].Gy[13,14] = R_a_3
        struct[0].Gy[14,4] = sin(delta_3 - theta_3)
        struct[0].Gy[14,5] = -V_3*cos(delta_3 - theta_3)
        struct[0].Gy[14,13] = R_a_3
        struct[0].Gy[14,14] = -X1q_3
        struct[0].Gy[15,4] = i_d_3*sin(delta_3 - theta_3) + i_q_3*cos(delta_3 - theta_3)
        struct[0].Gy[15,5] = -V_3*i_d_3*cos(delta_3 - theta_3) + V_3*i_q_3*sin(delta_3 - theta_3)
        struct[0].Gy[15,13] = V_3*sin(delta_3 - theta_3)
        struct[0].Gy[15,14] = V_3*cos(delta_3 - theta_3)
        struct[0].Gy[15,15] = -1
        struct[0].Gy[16,4] = i_d_3*cos(delta_3 - theta_3) - i_q_3*sin(delta_3 - theta_3)
        struct[0].Gy[16,5] = V_3*i_d_3*sin(delta_3 - theta_3) + V_3*i_q_3*cos(delta_3 - theta_3)
        struct[0].Gy[16,13] = V_3*cos(delta_3 - theta_3)
        struct[0].Gy[16,14] = -V_3*sin(delta_3 - theta_3)
        struct[0].Gy[16,16] = -1
        struct[0].Gy[17,17] = -1
        struct[0].Gy[18,18] = -1
        struct[0].Gy[18,21] = 1
        struct[0].Gy[19,19] = -1
        struct[0].Gy[20,20] = -1
        struct[0].Gy[21,21] = -1
        struct[0].Gy[22,0] = cos(delta_1 - theta_1)
        struct[0].Gy[22,1] = V_1*sin(delta_1 - theta_1)
        struct[0].Gy[22,22] = X_v_1
        struct[0].Gy[22,23] = R_v_1
        struct[0].Gy[22,27] = -1
        struct[0].Gy[23,0] = sin(delta_1 - theta_1)
        struct[0].Gy[23,1] = -V_1*cos(delta_1 - theta_1)
        struct[0].Gy[23,22] = R_v_1
        struct[0].Gy[23,23] = -X_v_1
        struct[0].Gy[24,0] = i_d_1*sin(delta_1 - theta_1) + i_q_1*cos(delta_1 - theta_1)
        struct[0].Gy[24,1] = -V_1*i_d_1*cos(delta_1 - theta_1) + V_1*i_q_1*sin(delta_1 - theta_1)
        struct[0].Gy[24,24] = -1
        struct[0].Gy[25,0] = i_d_1*cos(delta_1 - theta_1) - i_q_1*sin(delta_1 - theta_1)
        struct[0].Gy[25,1] = V_1*i_d_1*sin(delta_1 - theta_1) + V_1*i_q_1*cos(delta_1 - theta_1)
        struct[0].Gy[25,25] = -1
        struct[0].Gy[26,26] = -1
        struct[0].Gy[27,25] = -K_q_1
        struct[0].Gy[27,27] = -1
        struct[0].Gy[28,0] = -i_d_1*sin(delta_1 - theta_1) - i_q_1*cos(delta_1 - theta_1)
        struct[0].Gy[28,1] = V_1*i_d_1*cos(delta_1 - theta_1) - V_1*i_q_1*sin(delta_1 - theta_1)
        struct[0].Gy[28,28] = 1
        struct[0].Gy[29,29] = -1
        struct[0].Gy[30,30] = -1
        struct[0].Gu[0,0] = -1/S_base
        struct[0].Gu[1,1] = -1/S_base
        struct[0].Gu[2,2] = -1/S_base
        struct[0].Gu[3,3] = -1/S_base
        struct[0].Gu[4,4] = -1/S_base
        struct[0].Gu[5,5] = -1/S_base
        struct[0].Gu[10,6] = K_a_2
        struct[0].Gu[10,7] = K_a_2
        struct[0].Gu[11,8] = 1
        struct[0].Gu[17,9] = K_a_3
        struct[0].Gu[17,10] = K_a_3
        struct[0].Gu[18,11] = 1
        struct[0].Gu[27,14] = K_q_1
        struct[0].Gu[28,15] = 1
        struct[0].Gu[29,13] = 1
        struct[0].Gu[29,15] = 1
        struct[0].Gu[29,16] = Piecewise(np.array([(0, ((p_sto_1 > 0.0) | (soc_1 > 1.0)) & ((p_sto_1 > 0.0) | (p_sto_1 < 0.0)) & ((soc_1 > 1.0) | (soc_1 < 0.0)) & ((p_sto_1 < 0.0) | (soc_1 < 0.0))), (K_p_soc_1, True)]))
@numba.njit(cache=True)
def Piecewise(arg):
    """Evaluate a SymPy-style piecewise list of (value, condition) pairs.

    Scans from the last pair down to the first so that, exactly as in the
    original, the FIRST pair whose condition holds wins.  When no condition
    holds, the fallback is ``arg[0][1]`` (the first pair's condition slot),
    mirroring the generated code's convention.
    """
    result = arg[0][1]
    idx = len(arg) - 1
    while idx >= 0:
        if arg[idx][1]:
            result = arg[idx][0]
        idx -= 1
    return result
@numba.njit(cache=True)
def ITE(arg):
    """If-then-else over (value, condition) pairs.

    Identical selection rule to ``Piecewise``: iterate backwards so the
    first satisfied condition determines the returned value; default to
    ``arg[0][1]`` when none is satisfied.
    """
    result = arg[0][1]
    idx = len(arg) - 1
    while idx >= 0:
        if arg[idx][1]:
            result = arg[idx][0]
        idx -= 1
    return result
@numba.njit(cache=True)
def Abs(x):
    """Absolute value (njit-compatible wrapper around NumPy)."""
    result = np.abs(x)
    return result
@numba.njit(cache=True)
def ini_dae_jacobian_numba(struct,x):
    """Assemble the dense initialization Jacobian ``Ac_ini``.

    Unpacks the stacked unknowns ``x`` into the dynamic states and the
    initialization algebraic states, evaluates the sparse Jacobian blocks
    (modes 10 and 11), then scatters them into the four quadrants of
    ``Ac_ini`` = [[Fx, Fy], [Gx, Gy]].
    """
    n_x = struct[0].N_x
    n_y = struct[0].N_y
    struct[0].x[:,0] = x[:n_x]
    struct[0].y_ini[:,0] = x[n_x:n_x + n_y]
    # mode 10 fills Fx_ini; mode 11 fills Fy_ini, Gx_ini and Gy_ini.
    ini(struct,10)
    ini(struct,11)
    for r,c in zip(struct[0].Fx_ini_rows,struct[0].Fx_ini_cols):
        struct[0].Ac_ini[r,c] = struct[0].Fx_ini[r,c]
    for r,c in zip(struct[0].Fy_ini_rows,struct[0].Fy_ini_cols):
        struct[0].Ac_ini[r,c + n_x] = struct[0].Fy_ini[r,c]
    for r,c in zip(struct[0].Gx_ini_rows,struct[0].Gx_ini_cols):
        struct[0].Ac_ini[r + n_x,c] = struct[0].Gx_ini[r,c]
    for r,c in zip(struct[0].Gy_ini_rows,struct[0].Gy_ini_cols):
        struct[0].Ac_ini[r + n_x,c + n_x] = struct[0].Gy_ini[r,c]
@numba.njit(cache=True)
def ini_dae_problem(struct,x):
    """Evaluate the initialization residual vector ``fg`` = [f; g].

    Splits the stacked unknowns ``x`` into states and initialization
    algebraic variables, evaluates the differential (mode 2) and algebraic
    (mode 3) residuals, and stacks them into ``struct[0].fg``.
    """
    n_x = struct[0].N_x
    n_y = struct[0].N_y
    struct[0].x[:,0] = x[:n_x]
    struct[0].y_ini[:,0] = x[n_x:n_x + n_y]
    ini(struct,2)
    ini(struct,3)
    struct[0].fg[:n_x,:] = struct[0].f[:]
    struct[0].fg[n_x:,:] = struct[0].g[:]
@numba.njit(cache=True)
def ssate(struct,xy):
    """Newton solver for the DAE steady state.

    Iterates at most 100 full-Newton steps on the stacked unknowns ``xy``;
    stops early once the residual infinity-norm drops below 1e-8.
    Returns the updated ``xy`` and the index of the last iteration.
    """
    for it in range(100):
        ini_dae_jacobian_numba(struct,xy[:,0])
        ini_dae_problem(struct,xy[:,0])
        # Newton step: Ac_ini * Dxy = -fg
        step = np.linalg.solve(struct[0].Ac_ini,-struct[0].fg)
        xy[:] += step
        if np.max(np.abs(struct[0].fg[:,0])) < 1e-8:
            break
    n_x = struct[0].N_x
    struct[0].x[:,0] = xy[:n_x,0]
    struct[0].y_ini[:,0] = xy[n_x:,0]
    return xy,it
@numba.njit(cache=True)
def daesolver(struct):
    """Time-domain DAE integrator.

    Advances the model from ``struct[i].t`` to ``struct[i].t_end`` with fixed
    step ``Dt``.  Only ``solver == 5`` (implicit trapezoidal rule with full
    Newton iterations, as in Milano's book) is implemented here.  Results are
    stored into the T/X/Y/Z channel arrays every ``decimation`` steps when
    ``store == 1``.  Returns the final simulation time.
    """
    sin = np.sin
    cos = np.cos
    sqrt = np.sqrt
    i = 0
    Dt = struct[i].Dt
    N_x = struct[i].N_x
    N_y = struct[i].N_y
    N_z = struct[i].N_z
    decimation = struct[i].decimation
    eye = np.eye(N_x)
    t = struct[i].t
    t_end = struct[i].t_end
    if struct[i].it == 0:
        # First call: evaluate once (mode 1) and store the initial point.
        run(t,struct, 1)
        struct[i].it_store = 0
        struct[i]['T'][0] = t
        struct[i].X[0,:] = struct[i].x[:,0]
        struct[i].Y[0,:] = struct[i].y_run[:,0]
        struct[i].Z[0,:] = struct[i].h[:,0]
    solver = struct[i].solvern
    while t<t_end:
        struct[i].it += 1
        struct[i].t += Dt
        t = struct[i].t
        if solver == 5: # Trapezoidal DAE as in Milano's book
            # f and g at the beginning of the step (needed by the rule).
            run(t,struct, 2)
            run(t,struct, 3)
            x = np.copy(struct[i].x[:])
            y = np.copy(struct[i].y_run[:])
            f = np.copy(struct[i].f[:])
            g = np.copy(struct[i].g[:])
            # Newton iterations on the implicit trapezoidal equations.
            for iter in range(struct[i].imax):
                run(t,struct, 2)
                run(t,struct, 3)
                run(t,struct,10)
                run(t,struct,11)
                x_i = struct[i].x[:]
                y_i = struct[i].y_run[:]
                f_i = struct[i].f[:]
                g_i = struct[i].g[:]
                F_x_i = struct[i].Fx[:,:]
                F_y_i = struct[i].Fy[:,:]
                G_x_i = struct[i].Gx[:,:]
                G_y_i = struct[i].Gy[:,:]
                # Iteration matrix of the trapezoidal rule.
                A_c_i = np.vstack((np.hstack((eye-0.5*Dt*F_x_i, -0.5*Dt*F_y_i)),
                                   np.hstack((G_x_i, G_y_i))))
                f_n_i = x_i - x - 0.5*Dt*(f_i+f)
                # print(t,iter,g_i)
                Dxy_i = np.linalg.solve(-A_c_i,np.vstack((f_n_i,g_i)))
                x_i = x_i + Dxy_i[0:N_x]
                y_i = y_i + Dxy_i[N_x:(N_x+N_y)]
                struct[i].x[:] = x_i
                struct[i].y_run[:] = y_i
                # [f_i,g_i,F_x_i,F_y_i,G_x_i,G_y_i] = smib_transient(x_i,y_i,u);
                # A_c_i = [[eye(N_x)-0.5*Dt*F_x_i, -0.5*Dt*F_y_i],
                #          [                G_x_i,         G_y_i]];
                # f_n_i = x_i - x - 0.5*Dt*(f_i+f);
                # Dxy_i = -A_c_i\[f_n_i.',g_i.'].';
                # x_i = x_i + Dxy_i(1:N_x);
                # y_i = y_i + Dxy_i(N_x+1:N_x+N_y);
                xy = np.vstack((x_i,y_i))
                # Relative-update convergence test (0.001 floor avoids
                # dividing by near-zero variables).
                max_relative = 0.0
                for it_var in range(N_x+N_y):
                    abs_value = np.abs(xy[it_var,0])
                    if abs_value < 0.001:
                        abs_value = 0.001
                    relative_error = np.abs(Dxy_i[it_var,0])/abs_value
                    if relative_error > max_relative: max_relative = relative_error
                if max_relative<struct[i].itol:
                    break
                # if iter>struct[i].imax-2:
                #     print('Convergence problem')
            struct[i].x[:] = x_i
            struct[i].y_run[:] = y_i
        # channels
        if struct[i].store == 1:
            it_store = struct[i].it_store
            if struct[i].it >= it_store*decimation:
                struct[i]['T'][it_store+1] = t
                struct[i].X[it_store+1,:] = struct[i].x[:,0]
                struct[i].Y[it_store+1,:] = struct[i].y_run[:,0]
                struct[i].Z[it_store+1,:] = struct[i].h[:,0]
                struct[i].iters[it_store+1,0] = iter
                struct[i].it_store += 1
    struct[i].t = t
    return t
def nonzeros():
    """Return the sparsity patterns of the initialization Jacobian blocks.

    Returns eight lists: the (row, col) index pairs of the nonzero entries
    of Fx_ini, Fy_ini, Gx_ini and Gy_ini, in that order.
    """
    Fx_ini = (
        [0, 0, 1, 1, 2, 3, 4, 6, 7, 7, 8, 9, 9, 10, 10, 11, 12, 13, 15, 16, 16, 17, 19, 19, 20, 21, 21, 22, 23, 26],
        [0, 1, 0, 1, 2, 3, 4, 6, 6, 7, 8, 9, 10, 9, 10, 11, 12, 13, 15, 15, 16, 17, 19, 20, 20, 20, 21, 22, 23, 25],
    )
    Fy_ini = (
        [0, 1, 1, 1, 1, 1, 1, 2, 2, 3, 4, 5, 6, 8, 9, 10, 10, 10, 10, 10, 10, 11, 11, 12, 13, 14, 15, 17, 18, 19, 19, 19, 20, 20, 20, 22, 23, 24, 25],
        [30, 2, 3, 6, 7, 12, 30, 6, 10, 7, 2, 2, 11, 8, 30, 4, 5, 13, 14, 19, 30, 13, 17, 14, 4, 4, 18, 15, 30, 24, 29, 30, 24, 26, 29, 22, 23, 25, 28],
    )
    Gx_ini = (
        [6, 6, 7, 7, 8, 9, 10, 10, 11, 11, 12, 12, 13, 13, 14, 14, 15, 16, 17, 17, 18, 18, 19, 19, 20, 21, 22, 23, 24, 24, 24, 25, 25, 25, 26, 26, 27, 28, 28, 28, 29, 29, 30, 30],
        [0, 2, 0, 3, 0, 0, 4, 5, 1, 8, 6, 7, 9, 11, 9, 12, 9, 9, 13, 14, 10, 17, 15, 16, 18, 18, 19, 19, 19, 22, 23, 19, 22, 23, 20, 21, 24, 19, 22, 23, 25, 26, 1, 10],
    )
    Gy_ini = (
        [0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 6, 6, 6, 6, 7, 7, 7, 7, 8, 8, 8, 8, 8, 9, 9, 9, 9, 9, 10, 11, 11, 12, 13, 13, 13, 13, 14, 14, 14, 14, 15, 15, 15, 15, 15, 16, 16, 16, 16, 16, 17, 18, 18, 19, 20, 21, 22, 22, 22, 22, 22, 23, 23, 23, 23, 24, 24, 24, 25, 25, 25, 26, 27, 27, 28, 28, 28, 29, 30],
        [0, 1, 2, 3, 24, 0, 1, 2, 3, 25, 0, 1, 2, 3, 4, 5, 8, 0, 1, 2, 3, 4, 5, 9, 2, 3, 4, 5, 15, 2, 3, 4, 5, 16, 2, 3, 6, 7, 2, 3, 6, 7, 2, 3, 6, 7, 8, 2, 3, 6, 7, 9, 10, 11, 20, 12, 4, 5, 13, 14, 4, 5, 13, 14, 4, 5, 13, 14, 15, 4, 5, 13, 14, 16, 17, 18, 21, 19, 20, 21, 0, 1, 22, 23, 27, 0, 1, 22, 23, 0, 1, 24, 0, 1, 25, 26, 25, 27, 0, 1, 28, 29, 30],
    )
    return (Fx_ini[0], Fx_ini[1], Fy_ini[0], Fy_ini[1],
            Gx_ini[0], Gx_ini[1], Gy_ini[0], Gy_ini[1])
| 48.109031
| 744
| 0.563797
| 29,392
| 131,049
| 2.162187
| 0.01218
| 0.197054
| 0.064452
| 0.035877
| 0.906784
| 0.876288
| 0.850231
| 0.824503
| 0.807556
| 0.793803
| 0
| 0.151018
| 0.249143
| 131,049
| 2,724
| 745
| 48.109031
| 0.494832
| 0.017803
| 0
| 0.718388
| 0
| 0
| 0.014834
| 0.001402
| 0
| 0
| 0
| 0
| 0
| 1
| 0.020574
| false
| 0
| 0.001715
| 0.000857
| 0.035148
| 0.003429
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
716f27f0114d1157b2c7fce7ba0854a2c899ace5
| 1,582
|
py
|
Python
|
backend/rsa/forms.py
|
BigBlackWolf/crypto
|
7b2dc151f0abd5319be5bccca92fcac77df7771d
|
[
"MIT"
] | null | null | null |
backend/rsa/forms.py
|
BigBlackWolf/crypto
|
7b2dc151f0abd5319be5bccca92fcac77df7771d
|
[
"MIT"
] | 3
|
2021-03-09T00:51:05.000Z
|
2022-02-17T20:04:03.000Z
|
backend/rsa/forms.py
|
BigBlackWolf/crypto
|
7b2dc151f0abd5319be5bccca92fcac77df7771d
|
[
"MIT"
] | 1
|
2019-02-10T22:44:53.000Z
|
2019-02-10T22:44:53.000Z
|
from wtforms import IntegerField, validators, StringField
from flask_wtf import FlaskForm
import re
# Regular expression to validate a hex number.
# Anchored with \A...\Z because wtforms' Regexp validator uses re.match,
# which only anchors at the start: without the trailing anchor, an input
# such as "abcXYZ" would pass validation since the leading "abc" matches.
hex_regexp = re.compile(r'\A[0-9a-fA-F]+\Z')
class GenerateKeyForm(FlaskForm):
    """Form requesting a new RSA key pair; key length is bounded to 64-1024 bits."""
    length = IntegerField(validators=[validators.NumberRange(64, 1024)])
class EncryptForm(FlaskForm):
    """Form for encrypting a message with a public key given as hex modulus/exponent."""
    modulus = StringField(validators=[validators.Regexp(hex_regexp)])
    exponent = StringField(validators=[validators.Regexp(hex_regexp)])
    message = StringField(validators=[validators.Length(min=1, max=1024)])
class DecryptForm(FlaskForm):
    """Form for decrypting a hex-encoded ciphertext."""
    ciphertext = StringField(validators=[validators.Regexp(hex_regexp)])
class SignatureForm(FlaskForm):
    """Form for signing a message (1-1024 characters)."""
    message = StringField(validators=[validators.Length(min=1, max=1024)])
class VerificationForm(FlaskForm):
    """Form for verifying a signature against a message and a hex public key."""
    message = StringField(validators=[validators.Length(min=1, max=1024)])
    signature = StringField(validators=[validators.Length(min=1, max=1024)])
    modulus = StringField(validators=[validators.Regexp(hex_regexp)])
    exponent = StringField(validators=[validators.Regexp(hex_regexp)])
class SendKeyForm(FlaskForm):
    """Form carrying a public key (hex modulus and exponent) to send to a peer."""
    modulus = StringField(validators=[validators.Regexp(hex_regexp)])
    exponent = StringField(validators=[validators.Regexp(hex_regexp)])
class ReceiveKeyForm(FlaskForm):
    """Form for a received key plus its signature and the sender's hex public key."""
    key = StringField(validators=[validators.Length(min=1, max=1024)])
    signature = StringField(validators=[validators.Length(min=1, max=1024)])
    modulus = StringField(validators=[validators.Regexp(hex_regexp)])
    exponent = StringField(validators=[validators.Regexp(hex_regexp)])
| 36.790698
| 76
| 0.767383
| 172
| 1,582
| 6.994186
| 0.25
| 0.266002
| 0.386534
| 0.276808
| 0.716542
| 0.716542
| 0.716542
| 0.674148
| 0.674148
| 0.674148
| 0
| 0.026893
| 0.106827
| 1,582
| 43
| 77
| 36.790698
| 0.824487
| 0.025917
| 0
| 0.481481
| 0
| 0
| 0.007792
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.962963
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
7184bf9751f74bffcaac3c862086394d9cbd3a00
| 3,487
|
py
|
Python
|
lib/parsedatetime/tests/TestComplexDateTimes.py
|
r3tard/BartusBot
|
baa0e775a8495c696ca274d78f898eb74d8fa961
|
[
"Apache-2.0"
] | 1
|
2015-11-01T00:16:41.000Z
|
2015-11-01T00:16:41.000Z
|
lib/parsedatetime-1.5/parsedatetime/tests/TestComplexDateTimes.py
|
r3tard/BartusBot
|
baa0e775a8495c696ca274d78f898eb74d8fa961
|
[
"Apache-2.0"
] | 12
|
2015-10-30T20:22:53.000Z
|
2016-02-09T21:56:17.000Z
|
lib/parsedatetime/tests/TestComplexDateTimes.py
|
r3tard/BartusBot
|
baa0e775a8495c696ca274d78f898eb74d8fa961
|
[
"Apache-2.0"
] | null | null | null |
"""
Test parsing of complex date and times
"""
import unittest, time, datetime
import parsedatetime as pdt
class test(unittest.TestCase):
    @pdt.tests.assertEqualWithComparator
    def assertExpectedResult(self, result, check, **kwargs):
        # Delegates to parsedatetime's comparator, which checks both the
        # parsed time tuples and the returned context flags.
        return pdt.tests.compareResultByTimeTuplesAndFlags(result, check, **kwargs)
    def setUp(self):
        # Fresh Calendar per test; capture "now" components so relative
        # parses in the tests have a stable reference point.
        self.cal = pdt.Calendar()
        self.yr, self.mth, self.dy, self.hr, self.mn, self.sec, self.wd, self.yd, self.isdst = time.localtime()
def testDates(self):
start = datetime.datetime(self.yr, self.mth, self.dy, self.hr, self.mn, self.sec).timetuple()
target = datetime.datetime(2006, 8, 25, 17, 0, 0).timetuple()
self.assertExpectedResult(self.cal.parse('08/25/2006 5pm', start), (target, 3))
self.assertExpectedResult(self.cal.parse('5pm on 08.25.2006', start), (target, 3))
self.assertExpectedResult(self.cal.parse('5pm August 25, 2006', start), (target, 3))
self.assertExpectedResult(self.cal.parse('5pm August 25th, 2006', start), (target, 3))
self.assertExpectedResult(self.cal.parse('5pm 25 August, 2006', start), (target, 3))
self.assertExpectedResult(self.cal.parse('5pm 25th August, 2006', start), (target, 3))
self.assertExpectedResult(self.cal.parse('Aug 25, 2006 5pm', start), (target, 3))
self.assertExpectedResult(self.cal.parse('Aug 25th, 2006 5pm', start), (target, 3))
self.assertExpectedResult(self.cal.parse('25 Aug, 2006 5pm', start), (target, 3))
self.assertExpectedResult(self.cal.parse('25th Aug 2006, 5pm', start), (target, 3))
if self.mth > 8 or (self.mth == 8 and self.dy > 5):
target = datetime.datetime(self.yr + 1, 8, 5, 17, 0, 0).timetuple()
else:
target = datetime.datetime(self.yr, 8, 5, 17, 0, 0).timetuple()
self.assertExpectedResult(self.cal.parse('8/5 at 5pm', start), (target, 3))
self.assertExpectedResult(self.cal.parse('5pm 8.5', start), (target, 3))
self.assertExpectedResult(self.cal.parse('08/05 5pm', start), (target, 3))
self.assertExpectedResult(self.cal.parse('August 5 5pm', start), (target, 3))
self.assertExpectedResult(self.cal.parse('5pm Aug 05', start), (target, 3))
self.assertExpectedResult(self.cal.parse('Aug 05 5pm', start), (target, 3))
self.assertExpectedResult(self.cal.parse('Aug 05th 5pm', start), (target, 3))
self.assertExpectedResult(self.cal.parse('5 August 5pm', start), (target, 3))
self.assertExpectedResult(self.cal.parse('5th August 5pm', start), (target, 3))
self.assertExpectedResult(self.cal.parse('5pm 05 Aug', start), (target, 3))
self.assertExpectedResult(self.cal.parse('05 Aug 5pm', start), (target, 3))
self.assertExpectedResult(self.cal.parse('05th Aug 5pm', start), (target, 3))
self.assertExpectedResult(self.cal.parse('August 5th 5pm', start), (target, 3))
if self.mth > 8 or (self.mth == 8 and self.dy > 5):
target = datetime.datetime(self.yr + 1, 8, 5, 12, 0, 0).timetuple()
else:
target = datetime.datetime(self.yr, 8, 5, 12, 0, 0).timetuple()
self.assertExpectedResult(self.cal.parse('August 5th 12pm', start), (target, 3))
self.assertExpectedResult(self.cal.parse('August 5th 12:00', start), (target, 3))
# Allow running this test module directly from the command line.
if __name__ == "__main__":
    unittest.main()
| 52.044776
| 111
| 0.639231
| 456
| 3,487
| 4.870614
| 0.153509
| 0.280955
| 0.315173
| 0.348942
| 0.78208
| 0.78208
| 0.773075
| 0.769023
| 0.704638
| 0.613237
| 0
| 0.06746
| 0.205047
| 3,487
| 66
| 112
| 52.833333
| 0.733766
| 0.010898
| 0
| 0.085106
| 0
| 0
| 0.104651
| 0
| 0
| 0
| 0
| 0
| 0.574468
| 1
| 0.06383
| false
| 0
| 0.042553
| 0.021277
| 0.148936
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
7192d0b16ee7f42249b9b62ae26be8b960fa6170
| 43
|
py
|
Python
|
HelloWorld.py
|
parth58199/Hello-World
|
6f7c5cecd56eeaea2a701e1a32c126c3da80ac69
|
[
"MIT"
] | null | null | null |
HelloWorld.py
|
parth58199/Hello-World
|
6f7c5cecd56eeaea2a701e1a32c126c3da80ac69
|
[
"MIT"
] | null | null | null |
HelloWorld.py
|
parth58199/Hello-World
|
6f7c5cecd56eeaea2a701e1a32c126c3da80ac69
|
[
"MIT"
] | null | null | null |
# Emit the two greeting variants, one per line.
for greeting in ('Hello World', 'hello world!'):
    print(greeting)
| 14.333333
| 21
| 0.697674
| 6
| 43
| 5
| 0.5
| 0.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093023
| 43
| 2
| 22
| 21.5
| 0.769231
| 0
| 0
| 0
| 0
| 0
| 0.534884
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
71d453167e30121c408288e193c2a9038a9ceccd
| 42
|
py
|
Python
|
src/lib/py_compile.py
|
DTenore/skulpt
|
098d20acfb088d6db85535132c324b7ac2f2d212
|
[
"MIT"
] | 2,671
|
2015-01-03T08:23:25.000Z
|
2022-03-31T06:15:48.000Z
|
src/lib/py_compile.py
|
wakeupmuyunhe/skulpt
|
a8fb11a80fb6d7c016bab5dfe3712517a350b347
|
[
"MIT"
] | 972
|
2015-01-05T08:11:00.000Z
|
2022-03-29T13:47:15.000Z
|
src/lib/py_compile.py
|
wakeupmuyunhe/skulpt
|
a8fb11a80fb6d7c016bab5dfe3712517a350b347
|
[
"MIT"
] | 845
|
2015-01-03T19:53:36.000Z
|
2022-03-29T18:34:22.000Z
|
# Skulpt stub: py_compile is not implemented in this environment; the
# _sk_fail helper is invoked with the module name (presumably to raise an
# "unsupported module" error — confirm against _sk_fail's definition).
import _sk_fail; _sk_fail._("py_compile")
| 21
| 41
| 0.785714
| 7
| 42
| 3.857143
| 0.714286
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071429
| 42
| 1
| 42
| 42
| 0.692308
| 0
| 0
| 0
| 0
| 0
| 0.238095
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e0b7b47b64c84fdc1e2f20c0f469ed68456e20df
| 16,192
|
py
|
Python
|
tests/hwsim/test_eap.py
|
wolfssl-jp/wolfssl_hostapd
|
458ca6d59a6dac97b3d6870132740b255ca7929d
|
[
"Unlicense"
] | 3
|
2021-09-07T18:41:51.000Z
|
2021-09-17T21:50:52.000Z
|
tests/hwsim/test_eap.py
|
kareem-wolfssl/wolfssl_hostapd
|
df2d4bae478c99086db2decc662ef440079fa63f
|
[
"Unlicense"
] | null | null | null |
tests/hwsim/test_eap.py
|
kareem-wolfssl/wolfssl_hostapd
|
df2d4bae478c99086db2decc662ef440079fa63f
|
[
"Unlicense"
] | 2
|
2021-09-02T23:36:42.000Z
|
2021-09-19T22:53:48.000Z
|
# EAP authentication tests
# Copyright (c) 2019, Jouni Malinen <j@w1.fi>
#
# This software may be distributed under the terms of the BSD license.
# See README for more details.
import hostapd
from utils import alloc_fail, fail_test, wait_fail_trigger, HwsimSkip
from test_ap_eap import check_eap_capa, int_eap_server_params, eap_connect, \
eap_reauth
def int_teap_server_params(eap_teap_auth=None, eap_teap_pac_no_inner=None):
    """Return integrated EAP server params extended with EAP-TEAP settings.

    Starts from int_eap_server_params() and adds the fixed PAC/A-ID
    configuration; the two optional TEAP knobs are only set when given.
    """
    cfg = int_eap_server_params()
    cfg['pac_opaque_encr_key'] = "000102030405060708090a0b0c0dff00"
    cfg['eap_fast_a_id'] = "101112131415161718191a1b1c1dff00"
    cfg['eap_fast_a_id_info'] = "test server 0"
    optional = (('eap_teap_auth', eap_teap_auth),
                ('eap_teap_pac_no_inner', eap_teap_pac_no_inner))
    for key, value in optional:
        if value:
            cfg[key] = value
    return cfg
def test_eap_teap_eap_mschapv2(dev, apdev):
    """EAP-TEAP with inner EAP-MSCHAPv2"""
    # Skip unless the wpa_supplicant build supports TEAP and the inner method.
    check_eap_capa(dev[0], "TEAP")
    check_eap_capa(dev[0], "MSCHAPV2")
    params = hostapd.wpa2_eap_params(ssid="test-wpa2-eap")
    hapd = hostapd.add_ap(apdev[0], params)
    # Phase 2 runs EAP-MSCHAPv2 inside the TEAP tunnel.
    eap_connect(dev[0], hapd, "TEAP", "user",
                anonymous_identity="TEAP", password="password",
                ca_cert="auth_serv/ca.pem", phase2="auth=MSCHAPV2",
                pac_file="blob://teap_pac")
    # Reauthenticate with the same method.
    eap_reauth(dev[0], "TEAP")

def test_eap_teap_eap_pwd(dev, apdev):
    """EAP-TEAP with inner EAP-PWD"""
    check_eap_capa(dev[0], "TEAP")
    check_eap_capa(dev[0], "PWD")
    params = hostapd.wpa2_eap_params(ssid="test-wpa2-eap")
    hapd = hostapd.add_ap(apdev[0], params)
    # Same flow as above but with EAP-PWD as the inner method.
    eap_connect(dev[0], hapd, "TEAP", "user-pwd-2",
                anonymous_identity="TEAP", password="password",
                ca_cert="auth_serv/ca.pem", phase2="auth=PWD",
                pac_file="blob://teap_pac")

def test_eap_teap_eap_eke(dev, apdev):
    """EAP-TEAP with inner EAP-EKE"""
    check_eap_capa(dev[0], "TEAP")
    check_eap_capa(dev[0], "EKE")
    params = hostapd.wpa2_eap_params(ssid="test-wpa2-eap")
    hapd = hostapd.add_ap(apdev[0], params)
    # Same flow as above but with EAP-EKE as the inner method.
    eap_connect(dev[0], hapd, "TEAP", "user-eke-2",
                anonymous_identity="TEAP", password="password",
                ca_cert="auth_serv/ca.pem", phase2="auth=EKE",
                pac_file="blob://teap_pac")
def test_eap_teap_basic_password_auth(dev, apdev):
    """EAP-TEAP with Basic-Password-Auth"""
    check_eap_capa(dev[0], "TEAP")
    # eap_teap_auth="1" selects Basic-Password-Auth on the server side.
    params = int_teap_server_params(eap_teap_auth="1")
    hapd = hostapd.add_ap(apdev[0], params)
    eap_connect(dev[0], hapd, "TEAP", "user",
                anonymous_identity="TEAP", password="password",
                ca_cert="auth_serv/ca.pem",
                pac_file="blob://teap_pac")

def test_eap_teap_basic_password_auth_failure(dev, apdev):
    """EAP-TEAP with Basic-Password-Auth failure"""
    check_eap_capa(dev[0], "TEAP")
    params = int_teap_server_params(eap_teap_auth="1")
    hapd = hostapd.add_ap(apdev[0], params)
    # Wrong password: the connection attempt must fail.
    eap_connect(dev[0], hapd, "TEAP", "user",
                anonymous_identity="TEAP", password="incorrect",
                ca_cert="auth_serv/ca.pem",
                pac_file="blob://teap_pac", expect_failure=True)

def test_eap_teap_basic_password_auth_no_password(dev, apdev):
    """EAP-TEAP with Basic-Password-Auth and no password configured"""
    check_eap_capa(dev[0], "TEAP")
    params = int_teap_server_params(eap_teap_auth="1")
    hapd = hostapd.add_ap(apdev[0], params)
    # No password supplied at all: the connection attempt must fail.
    eap_connect(dev[0], hapd, "TEAP", "user",
                anonymous_identity="TEAP",
                ca_cert="auth_serv/ca.pem",
                pac_file="blob://teap_pac", expect_failure=True)
def test_eap_teap_peer_outer_tlvs(dev, apdev):
    """EAP-TEAP with peer Outer TLVs"""
    check_eap_capa(dev[0], "TEAP")
    check_eap_capa(dev[0], "MSCHAPV2")
    params = hostapd.wpa2_eap_params(ssid="test-wpa2-eap")
    hapd = hostapd.add_ap(apdev[0], params)
    # teap_test_outer_tlvs=1 makes the peer include test Outer TLVs in phase 1.
    eap_connect(dev[0], hapd, "TEAP", "user",
                anonymous_identity="TEAP", password="password",
                ca_cert="auth_serv/ca.pem", phase2="auth=MSCHAPV2",
                pac_file="blob://teap_pac", phase1="teap_test_outer_tlvs=1")
def test_eap_teap_eap_mschapv2_pac(dev, apdev):
    """EAP-TEAP with inner EAP-MSCHAPv2 and PAC provisioning"""
    check_eap_capa(dev[0], "TEAP")
    check_eap_capa(dev[0], "MSCHAPV2")
    params = hostapd.wpa2_eap_params(ssid="test-wpa2-eap")
    hapd = hostapd.add_ap(apdev[0], params)
    # teap_provisioning=2: authenticated PAC provisioning.
    eap_connect(dev[0], hapd, "TEAP", "user",
                anonymous_identity="TEAP", password="password",
                phase1="teap_provisioning=2",
                ca_cert="auth_serv/ca.pem", phase2="auth=MSCHAPV2",
                pac_file="blob://teap_pac")
    # Reauth must resume the TLS session via the provisioned PAC.
    res = eap_reauth(dev[0], "TEAP")
    if res['tls_session_reused'] != '1':
        raise Exception("EAP-TEAP could not use PAC session ticket")

def test_eap_teap_eap_mschapv2_pac_no_inner_eap(dev, apdev):
    """EAP-TEAP with inner EAP-MSCHAPv2 and PAC without inner EAP"""
    check_eap_capa(dev[0], "TEAP")
    check_eap_capa(dev[0], "MSCHAPV2")
    # Server configured to skip the inner method when a PAC is used.
    params = int_teap_server_params(eap_teap_pac_no_inner="1")
    hapd = hostapd.add_ap(apdev[0], params)
    eap_connect(dev[0], hapd, "TEAP", "user",
                anonymous_identity="TEAP", password="password",
                phase1="teap_provisioning=2",
                ca_cert="auth_serv/ca.pem", phase2="auth=MSCHAPV2",
                pac_file="blob://teap_pac")
    res = eap_reauth(dev[0], "TEAP")
    if res['tls_session_reused'] != '1':
        raise Exception("EAP-TEAP could not use PAC session ticket")

def test_eap_teap_eap_mschapv2_pac_no_ca_cert(dev, apdev):
    """EAP-TEAP with inner EAP-MSCHAPv2 and PAC provisioning attempt without ca_cert"""
    check_eap_capa(dev[0], "TEAP")
    check_eap_capa(dev[0], "MSCHAPV2")
    params = hostapd.wpa2_eap_params(ssid="test-wpa2-eap")
    hapd = hostapd.add_ap(apdev[0], params)
    # Note: no ca_cert here, so the server cannot be validated.
    eap_connect(dev[0], hapd, "TEAP", "user",
                anonymous_identity="TEAP", password="password",
                phase1="teap_provisioning=2",
                phase2="auth=MSCHAPV2",
                pac_file="blob://teap_pac")
    # Expectation inverted: the session ticket must NOT be reused.
    res = eap_reauth(dev[0], "TEAP")
    if res['tls_session_reused'] == '1':
        raise Exception("Unexpected use of PAC session ticket")
def test_eap_teap_basic_password_auth_pac(dev, apdev):
    """EAP-TEAP with Basic-Password-Auth and PAC"""
    check_eap_capa(dev[0], "TEAP")
    params = int_teap_server_params(eap_teap_auth="1")
    hapd = hostapd.add_ap(apdev[0], params)
    eap_connect(dev[0], hapd, "TEAP", "user",
                anonymous_identity="TEAP", password="password",
                phase1="teap_provisioning=2",
                ca_cert="auth_serv/ca.pem", phase2="auth=MSCHAPV2",
                pac_file="blob://teap_pac")
    # Reauth must resume the TLS session via the provisioned PAC.
    res = eap_reauth(dev[0], "TEAP")
    if res['tls_session_reused'] != '1':
        raise Exception("EAP-TEAP could not use PAC session ticket")

def test_eap_teap_basic_password_auth_pac_binary(dev, apdev):
    """EAP-TEAP with Basic-Password-Auth and PAC (binary)"""
    check_eap_capa(dev[0], "TEAP")
    params = int_teap_server_params(eap_teap_auth="1")
    hapd = hostapd.add_ap(apdev[0], params)
    # Binary PAC file format with a bounded PAC list length.
    eap_connect(dev[0], hapd, "TEAP", "user",
                anonymous_identity="TEAP", password="password",
                phase1="teap_provisioning=2 teap_max_pac_list_len=2 teap_pac_format=binary",
                ca_cert="auth_serv/ca.pem", phase2="auth=MSCHAPV2",
                pac_file="blob://teap_pac_bin")
    res = eap_reauth(dev[0], "TEAP")
    if res['tls_session_reused'] != '1':
        raise Exception("EAP-TEAP could not use PAC session ticket")

def test_eap_teap_basic_password_auth_pac_no_inner_eap(dev, apdev):
    """EAP-TEAP with Basic-Password-Auth and PAC without inner auth"""
    check_eap_capa(dev[0], "TEAP")
    params = int_teap_server_params(eap_teap_auth="1",
                                    eap_teap_pac_no_inner="1")
    hapd = hostapd.add_ap(apdev[0], params)
    eap_connect(dev[0], hapd, "TEAP", "user",
                anonymous_identity="TEAP", password="password",
                phase1="teap_provisioning=2",
                ca_cert="auth_serv/ca.pem", phase2="auth=MSCHAPV2",
                pac_file="blob://teap_pac")
    res = eap_reauth(dev[0], "TEAP")
    if res['tls_session_reused'] != '1':
        raise Exception("EAP-TEAP could not use PAC session ticket")
def test_eap_teap_eap_eke_unauth_server_prov(dev, apdev):
    """EAP-TEAP with inner EAP-EKE and unauthenticated server provisioning"""
    check_eap_capa(dev[0], "TEAP")
    check_eap_capa(dev[0], "EKE")
    params = hostapd.wpa2_eap_params(ssid="test-wpa2-eap")
    hapd = hostapd.add_ap(apdev[0], params)
    # teap_provisioning=1: unauthenticated provisioning (no ca_cert given).
    eap_connect(dev[0], hapd, "TEAP", "user-eke-2",
                anonymous_identity="TEAP", password="password",
                phase1="teap_provisioning=1",
                phase2="auth=EKE", pac_file="blob://teap_pac")
    res = eap_reauth(dev[0], "TEAP")
    if res['tls_session_reused'] != '1':
        raise Exception("EAP-TEAP could not use PAC session ticket")

def test_eap_teap_fragmentation(dev, apdev):
    """EAP-TEAP with fragmentation"""
    check_eap_capa(dev[0], "TEAP")
    check_eap_capa(dev[0], "MSCHAPV2")
    params = hostapd.wpa2_eap_params(ssid="test-wpa2-eap")
    hapd = hostapd.add_ap(apdev[0], params)
    # Tiny fragment size forces EAP message fragmentation/reassembly.
    eap_connect(dev[0], hapd, "TEAP", "user",
                anonymous_identity="TEAP", password="password",
                ca_cert="auth_serv/ca.pem", phase2="auth=MSCHAPV2",
                pac_file="blob://teap_pac", fragment_size="100")
# Thin wrappers: same TEAP flow, varying only the TLS cipher suite.
def test_eap_teap_tls_cs_sha1(dev, apdev):
    """EAP-TEAP with TLS cipher suite that uses SHA-1"""
    run_eap_teap_tls_cs(dev, apdev, "AES128-SHA")

def test_eap_teap_tls_cs_sha256(dev, apdev):
    """EAP-TEAP with TLS cipher suite that uses SHA-256"""
    run_eap_teap_tls_cs(dev, apdev, "AES128-SHA256")

def test_eap_teap_tls_cs_sha384(dev, apdev):
    """EAP-TEAP with TLS cipher suite that uses SHA-384"""
    run_eap_teap_tls_cs(dev, apdev, "AES256-GCM-SHA384")
def run_eap_teap_tls_cs(dev, apdev, cipher):
    """Shared body for the TLS cipher-suite test variants."""
    check_eap_capa(dev[0], "TEAP")
    tls_lib = dev[0].request("GET tls_library")
    # Explicit cipher-suite configuration is only exercised with these stacks.
    if not tls_lib.startswith(("OpenSSL", "wolfSSL")):
        raise HwsimSkip("TLS library not supported for TLS CS configuration: " + tls_lib)
    srv_params = int_teap_server_params(eap_teap_auth="1")
    srv_params['openssl_ciphers'] = cipher
    ap = hostapd.add_ap(apdev[0], srv_params)
    eap_connect(dev[0], ap, "TEAP", "user",
                anonymous_identity="TEAP", password="password",
                ca_cert="auth_serv/ca.pem",
                pac_file="blob://teap_pac")
def wait_eap_proposed(dev, wait_trigger=None):
    """Wait until the EAP method is proposed, then tear the network down.

    Raises Exception if no proposal event arrives within 10 seconds; if
    wait_trigger is given, also wait for that fail trigger before cleanup.
    """
    event = dev.wait_event(["CTRL-EVENT-EAP-PROPOSED-METHOD"], timeout=10)
    if event is None:
        raise Exception("Timeout on EAP start")
    if wait_trigger:
        wait_fail_trigger(dev, wait_trigger)
    # Cleanup: drop all configured networks and drain pending events.
    dev.request("REMOVE_NETWORK all")
    dev.wait_disconnected()
    dev.dump_monitor()
def test_eap_teap_errors(dev, apdev):
    """EAP-TEAP local errors"""
    check_eap_capa(dev[0], "TEAP")
    check_eap_capa(dev[0], "MSCHAPV2")
    params = hostapd.wpa2_eap_params(ssid="test-wpa2-eap")
    hapd = hostapd.add_ap(apdev[0], params)
    # Case 1: no pac_file configured; just wait for the EAP proposal.
    dev[0].connect("test-wpa2-eap", key_mgmt="WPA-EAP",
                   scan_freq="2412",
                   eap="TEAP", identity="user", password="password",
                   anonymous_identity="TEAP",
                   ca_cert="auth_serv/ca.pem", phase2="auth=MSCHAPV2",
                   wait_connect=False)
    wait_eap_proposed(dev[0])
    # Case 2: corrupted (too short) PAC blob.
    dev[0].set("blob", "teap_broken_pac 11")
    dev[0].connect("test-wpa2-eap", key_mgmt="WPA-EAP",
                   scan_freq="2412",
                   eap="TEAP", identity="user", password="password",
                   anonymous_identity="TEAP",
                   ca_cert="auth_serv/ca.pem", phase2="auth=MSCHAPV2",
                   pac_file="blob://teap_broken_pac", wait_connect=False)
    wait_eap_proposed(dev[0])
    # Case 3: same broken blob interpreted as binary PAC format.
    dev[0].connect("test-wpa2-eap", key_mgmt="WPA-EAP",
                   scan_freq="2412",
                   eap="TEAP", identity="user", password="password",
                   anonymous_identity="TEAP",
                   ca_cert="auth_serv/ca.pem", phase2="auth=MSCHAPV2",
                   phase1="teap_pac_format=binary",
                   pac_file="blob://teap_broken_pac", wait_connect=False)
    wait_eap_proposed(dev[0])
    # Inject allocation failures into specific peer-side functions; each
    # (count, function) pair triggers the failure on the Nth allocation.
    tests = [(1, "eap_teap_tlv_eap_payload"),
             (1, "eap_teap_process_eap_payload_tlv"),
             (1, "eap_teap_compound_mac"),
             (1, "eap_teap_tlv_result"),
             (1, "eap_peer_select_phase2_methods"),
             (1, "eap_peer_tls_ssl_init"),
             (1, "eap_teap_session_id"),
             (1, "wpabuf_alloc;=eap_teap_process_crypto_binding"),
             (1, "eap_peer_tls_encrypt"),
             (1, "eap_peer_tls_decrypt"),
             (1, "eap_teap_getKey"),
             (1, "eap_teap_session_id"),
             (1, "eap_teap_init")]
    for count, func in tests:
        with alloc_fail(dev[0], count, func):
            dev[0].connect("test-wpa2-eap", key_mgmt="WPA-EAP",
                           scan_freq="2412",
                           eap="TEAP", identity="user", password="password",
                           anonymous_identity="TEAP",
                           ca_cert="auth_serv/ca.pem", phase2="auth=MSCHAPV2",
                           pac_file="blob://teap_pac", wait_connect=False)
            wait_eap_proposed(dev[0], wait_trigger="GET_ALLOC_FAIL")
    # Inject generic test failures (not allocation) into derivation/MAC code.
    tests = [(1, "eap_teap_derive_eap_msk"),
             (1, "eap_teap_derive_eap_emsk"),
             (1, "eap_teap_write_crypto_binding"),
             (1, "eap_teap_process_crypto_binding"),
             (1, "eap_teap_derive_msk;eap_teap_process_crypto_binding"),
             (1, "eap_teap_compound_mac;eap_teap_process_crypto_binding"),
             (1, "eap_teap_derive_imck")]
    for count, func in tests:
        with fail_test(dev[0], count, func):
            dev[0].connect("test-wpa2-eap", key_mgmt="WPA-EAP",
                           scan_freq="2412",
                           eap="TEAP", identity="user", password="password",
                           anonymous_identity="TEAP",
                           ca_cert="auth_serv/ca.pem", phase2="auth=MSCHAPV2",
                           pac_file="blob://teap_pac", wait_connect=False)
            wait_eap_proposed(dev[0], wait_trigger="GET_FAIL")
def test_eap_teap_errors2(dev, apdev):
    """EAP-TEAP local errors 2 (Basic-Password-Auth specific)"""
    check_eap_capa(dev[0], "TEAP")
    check_eap_capa(dev[0], "MSCHAPV2")
    params = int_teap_server_params(eap_teap_auth="1")
    hapd = hostapd.add_ap(apdev[0], params)
    # Allocation failures in the Basic-Password-Auth specific code paths.
    tests = [(1, "eap_teap_tlv_pac_ack"),
             (1, "eap_teap_process_basic_auth_req")]
    for count, func in tests:
        with alloc_fail(dev[0], count, func):
            dev[0].connect("test-wpa2-eap", key_mgmt="WPA-EAP",
                           scan_freq="2412",
                           eap="TEAP", identity="user", password="password",
                           anonymous_identity="TEAP",
                           phase1="teap_provisioning=2",
                           ca_cert="auth_serv/ca.pem", phase2="auth=MSCHAPV2",
                           pac_file="blob://teap_pac", wait_connect=False)
            wait_eap_proposed(dev[0], wait_trigger="GET_ALLOC_FAIL")
    # Generic failure injection in the CMK derivation for password auth.
    tests = [(1, "eap_teap_derive_cmk_basic_pw_auth")]
    for count, func in tests:
        with fail_test(dev[0], count, func):
            dev[0].connect("test-wpa2-eap", key_mgmt="WPA-EAP",
                           scan_freq="2412",
                           eap="TEAP", identity="user", password="password",
                           anonymous_identity="TEAP",
                           phase1="teap_provisioning=2",
                           ca_cert="auth_serv/ca.pem", phase2="auth=MSCHAPV2",
                           pac_file="blob://teap_pac", wait_connect=False)
            wait_eap_proposed(dev[0], wait_trigger="GET_FAIL")
| 46.262857
| 92
| 0.621603
| 2,214
| 16,192
| 4.251129
| 0.093044
| 0.071398
| 0.038249
| 0.046218
| 0.832129
| 0.805461
| 0.781874
| 0.762856
| 0.731619
| 0.70952
| 0
| 0.028935
| 0.238019
| 16,192
| 349
| 93
| 46.395415
| 0.733911
| 0.06707
| 0
| 0.682594
| 0
| 0
| 0.238851
| 0.046397
| 0
| 0
| 0
| 0
| 0
| 1
| 0.078498
| false
| 0.095563
| 0.010239
| 0
| 0.09215
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
1ca0eceebf5a30fc80ec4fa3878150dd2da64b68
| 145
|
py
|
Python
|
vision/command/__init__.py
|
JackGoldsworth/Vision
|
084330bec340596167944b623bc7b8d7d9c26b01
|
[
"MIT"
] | null | null | null |
vision/command/__init__.py
|
JackGoldsworth/Vision
|
084330bec340596167944b623bc7b8d7d9c26b01
|
[
"MIT"
] | 1
|
2018-08-20T18:35:48.000Z
|
2019-01-10T02:56:12.000Z
|
vision/command/__init__.py
|
JackGoldsworth/Vision
|
084330bec340596167944b623bc7b8d7d9c26b01
|
[
"MIT"
] | null | null | null |
from command.command_handler import CommandHandler
from command.command_info import CommandInfo
from command.command_parser import CommandParser
| 36.25
| 50
| 0.896552
| 18
| 145
| 7.055556
| 0.5
| 0.259843
| 0.425197
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.082759
| 145
| 3
| 51
| 48.333333
| 0.954887
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
1c0e0f277dee6bd8089d98ea9166298954440eaf
| 17,463
|
py
|
Python
|
src/azure-cli/azure/cli/command_modules/ams/tests/latest/test_ams_streaming_policy_scenarios.py
|
psignoret/azure-cli
|
1a4a043750315f9a7f2894b4287126089978b615
|
[
"MIT"
] | 2
|
2020-08-08T11:00:25.000Z
|
2020-08-08T11:00:30.000Z
|
src/azure-cli/azure/cli/command_modules/ams/tests/latest/test_ams_streaming_policy_scenarios.py
|
psignoret/azure-cli
|
1a4a043750315f9a7f2894b4287126089978b615
|
[
"MIT"
] | 2
|
2021-01-15T09:24:07.000Z
|
2021-01-15T09:30:10.000Z
|
src/azure-cli/azure/cli/command_modules/ams/tests/latest/test_ams_streaming_policy_scenarios.py
|
psignoret/azure-cli
|
1a4a043750315f9a7f2894b4287126089978b615
|
[
"MIT"
] | 1
|
2020-11-12T01:49:27.000Z
|
2020-11-12T01:49:27.000Z
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import os
from azure.cli.testsdk import ScenarioTest, ResourceGroupPreparer, StorageAccountPreparer
from azure.cli.command_modules.ams._test_utils import _get_test_data_file
class AmsStreamingPolicyTests(ScenarioTest):
@ResourceGroupPreparer()
@StorageAccountPreparer(parameter_name='storage_account_for_create')
def test_ams_streaming_policy(self, resource_group, storage_account_for_create):
amsname = self.create_random_name(prefix='ams', length=12)
self.kwargs.update({
'amsname': amsname,
'storageAccount': storage_account_for_create,
'location': 'westus'
})
self.cmd('az ams account create -n {amsname} -g {rg} --storage-account {storageAccount} -l {location}', checks=[
self.check('name', '{amsname}'),
self.check('location', 'West US')
])
streamingPolicyName = self.create_random_name(prefix='spn', length=10)
self.kwargs.update({
'streamingPolicyName': streamingPolicyName,
'protocols': 'HLS'
})
self.cmd('az ams streaming-policy create -a {amsname} -n {streamingPolicyName} -g {rg} --no-encryption-protocols {protocols}', checks=[
self.check('name', '{streamingPolicyName}'),
self.check('resourceGroup', '{rg}')
])
self.cmd('az ams streaming-policy show -a {amsname} -n {streamingPolicyName} -g {rg}', checks=[
self.check('name', '{streamingPolicyName}'),
self.check('noEncryption.enabledProtocols.hls', True)
])
list = self.cmd('az ams streaming-policy list -a {amsname} -g {rg}').get_output_in_json()
assert len(list) > 0
self.cmd('az ams streaming-policy delete -n {streamingPolicyName} -a {amsname} -g {rg}')
    @ResourceGroupPreparer()
    @StorageAccountPreparer(parameter_name='storage_account_for_create')
    def test_ams_streaming_policy_envelope(self, resource_group, storage_account_for_create):
        # Envelope encryption: verify protocols, key-acquisition URL template
        # and default key label all land in the created policy.
        amsname = self.create_random_name(prefix='ams', length=12)
        self.kwargs.update({
            'amsname': amsname,
            'storageAccount': storage_account_for_create,
            'location': 'eastus2'
        })
        self.cmd('az ams account create -n {amsname} -g {rg} --storage-account {storageAccount} -l {location}', checks=[
            self.check('name', '{amsname}'),
            self.check('location', 'East US 2')
        ])
        streamingPolicyName = self.create_random_name(prefix='spn', length=10)
        self.kwargs.update({
            'streamingPolicyName': streamingPolicyName,
            'protocols': 'HLS Dash',
            'urlTemplate': 'xyz.foo.bar',
            'label': 'label'
        })
        self.cmd('az ams streaming-policy create -a {amsname} -n {streamingPolicyName} -g {rg} --envelope-protocols {protocols} --envelope-template {urlTemplate} --envelope-default-key-label {label}', checks=[
            self.check('name', '{streamingPolicyName}'),
            self.check('envelopeEncryption.enabledProtocols.hls', True),
            self.check('envelopeEncryption.enabledProtocols.dash', True),
            self.check('envelopeEncryption.contentKeys.defaultKey.label', '{label}'),
            self.check('envelopeEncryption.customKeyAcquisitionUrlTemplate', '{urlTemplate}')
        ])
    @ResourceGroupPreparer()
    @StorageAccountPreparer(parameter_name='storage_account_for_create')
    def test_ams_streaming_policy_cenc(self, resource_group, storage_account_for_create):
        # CENC (common encryption): creates two content key policies, then a
        # streaming policy with PlayReady and Widevine DRM settings and checks
        # every supplied value is reflected in the created resource.
        amsname = self.create_random_name(prefix='ams', length=12)
        self.kwargs.update({
            'amsname': amsname,
            'storageAccount': storage_account_for_create,
            'location': 'westus2'
        })
        self.cmd('az ams account create -n {amsname} -g {rg} --storage-account {storageAccount} -l {location}', checks=[
            self.check('name', '{amsname}'),
            self.check('location', 'West US 2')
        ])
        policy_option_name1 = self.create_random_name(prefix='pon', length=12)
        policy_option_name2 = self.create_random_name(prefix='pon', length=12)
        self.kwargs.update({
            'description': 'ExampleDescription',
            'playReadyPath': '@' + _get_test_data_file('validPlayReadyTemplate.json'),
            'policyNameFromKeyToTrackMappings1': 'ckp',
            'policyNameFromKeyToTrackMappings2': 'demoPolicy15',
            'policyOptionName1': policy_option_name1,
            'policyOptionName2': policy_option_name2
        })
        self.cmd('az ams content-key-policy create -a {amsname} -n {policyNameFromKeyToTrackMappings1} -g {rg} --description {description} --open-restriction --play-ready-template "{playReadyPath}" --policy-option-name {policyOptionName1}', checks=[
            self.check('name', '{policyNameFromKeyToTrackMappings1}')
        ])
        self.cmd('az ams content-key-policy create -a {amsname} -n {policyNameFromKeyToTrackMappings2} -g {rg} --description {description} --open-restriction --clear-key-configuration --policy-option-name {policyOptionName2}', checks=[
            self.check('name', '{policyNameFromKeyToTrackMappings2}')
        ])
        streamingPolicyName = self.create_random_name(prefix='spn', length=10)
        self.kwargs.update({
            'streamingPolicyName': streamingPolicyName,
            'protocols': 'HLS SmoothStreaming',
            'clearTracks': '@' + _get_test_data_file('clearTracks.json'),
            'keyToTrackMappings': '@' + _get_test_data_file('keyToTrackMappings.json'),
            'label': 'label',
            'playReadyUrlTemplate': 'playReadyTemplate.foo.bar',
            'playReadyAttributes': 'awesomeAttributes',
            'widevineUrlTemplate': 'widevineTemplate.foo.bar'
        })
        self.cmd('az ams streaming-policy create -a {amsname} -n {streamingPolicyName} -g {rg} --cenc-protocols {protocols} --cenc-clear-tracks "{clearTracks}" --cenc-key-to-track-mappings "{keyToTrackMappings}" --cenc-default-key-label {label} --cenc-default-key-policy-name {policyNameFromKeyToTrackMappings1} --cenc-play-ready-template {playReadyUrlTemplate} --cenc-play-ready-attributes {playReadyAttributes} --cenc-widevine-template {widevineUrlTemplate}', checks=[
            self.check('name', '{streamingPolicyName}'),
            self.check('commonEncryptionCenc.enabledProtocols.hls', True),
            self.check('commonEncryptionCenc.enabledProtocols.smoothStreaming', True),
            self.check('commonEncryptionCenc.contentKeys.defaultKey.label', '{label}'),
            self.check('commonEncryptionCenc.drm.playReady.customLicenseAcquisitionUrlTemplate', '{playReadyUrlTemplate}'),
            self.check('commonEncryptionCenc.drm.playReady.playReadyCustomAttributes', '{playReadyAttributes}'),
            self.check('commonEncryptionCenc.drm.widevine.customLicenseAcquisitionUrlTemplate', '{widevineUrlTemplate}'),
        ])
    @ResourceGroupPreparer()
    @StorageAccountPreparer(parameter_name='storage_account_for_create')
    def test_ams_streaming_policy_cenc_default_drm(self, resource_group, storage_account_for_create):
        # CENC without explicit DRM templates: the DRM URL template and custom
        # attribute fields must all come back as None.
        amsname = self.create_random_name(prefix='ams', length=12)
        self.kwargs.update({
            'amsname': amsname,
            'storageAccount': storage_account_for_create,
            'location': 'westus2'
        })
        self.cmd('az ams account create -n {amsname} -g {rg} --storage-account {storageAccount} -l {location}', checks=[
            self.check('name', '{amsname}'),
            self.check('location', 'West US 2')
        ])
        policy_option_name1 = self.create_random_name(prefix='pon', length=12)
        policy_option_name2 = self.create_random_name(prefix='pon', length=12)
        self.kwargs.update({
            'description': 'ExampleDescription',
            'playReadyPath': '@' + _get_test_data_file('validPlayReadyTemplate.json'),
            'policyNameFromKeyToTrackMappings1': 'ckp',
            'policyNameFromKeyToTrackMappings2': 'demoPolicy15',
            'policyOptionName1': policy_option_name1,
            'policyOptionName2': policy_option_name2
        })
        self.cmd('az ams content-key-policy create -a {amsname} -n {policyNameFromKeyToTrackMappings1} -g {rg} --description {description} --open-restriction --play-ready-template "{playReadyPath}" --policy-option-name {policyOptionName1}', checks=[
            self.check('name', '{policyNameFromKeyToTrackMappings1}')
        ])
        self.cmd('az ams content-key-policy create -a {amsname} -n {policyNameFromKeyToTrackMappings2} -g {rg} --description {description} --open-restriction --clear-key-configuration --policy-option-name {policyOptionName2}', checks=[
            self.check('name', '{policyNameFromKeyToTrackMappings2}')
        ])
        streamingPolicyName = self.create_random_name(prefix='spn', length=10)
        self.kwargs.update({
            'streamingPolicyName': streamingPolicyName,
            'protocols': 'HLS SmoothStreaming',
            'clearTracks': '@' + _get_test_data_file('clearTracks.json'),
            'keyToTrackMappings': '@' + _get_test_data_file('keyToTrackMappings.json'),
            'label': 'label',
            'playReadyUrlTemplate': 'playReadyTemplate.foo.bar',
            'playReadyAttributes': 'awesomeAttributes'
        })
        self.cmd('az ams streaming-policy create -a {amsname} -n {streamingPolicyName} -g {rg} --cenc-protocols {protocols} --cenc-clear-tracks "{clearTracks}" --cenc-key-to-track-mappings "{keyToTrackMappings}" --cenc-default-key-label {label}', checks=[
            self.check('name', '{streamingPolicyName}'),
            self.check('commonEncryptionCenc.enabledProtocols.hls', True),
            self.check('commonEncryptionCenc.enabledProtocols.smoothStreaming', True),
            self.check('commonEncryptionCenc.contentKeys.defaultKey.label', '{label}'),
            self.check('commonEncryptionCenc.drm.playReady.customLicenseAcquisitionUrlTemplate', None),
            self.check('commonEncryptionCenc.drm.playReady.playReadyCustomAttributes', None),
            self.check('commonEncryptionCenc.drm.widevine.customLicenseAcquisitionUrlTemplate', None),
        ])
    @ResourceGroupPreparer()
    @StorageAccountPreparer(parameter_name='storage_account_for_create')
    def test_ams_streaming_policy_cenc_disable_widevine(self, resource_group, storage_account_for_create):
        # Same CENC setup but with --cenc-disable-widevine; DRM template and
        # attribute fields are expected to come back as None.
        amsname = self.create_random_name(prefix='ams', length=12)
        self.kwargs.update({
            'amsname': amsname,
            'storageAccount': storage_account_for_create,
            'location': 'westus2'
        })
        self.cmd('az ams account create -n {amsname} -g {rg} --storage-account {storageAccount} -l {location}', checks=[
            self.check('name', '{amsname}'),
            self.check('location', 'West US 2')
        ])
        policy_option_name1 = self.create_random_name(prefix='pon', length=12)
        policy_option_name2 = self.create_random_name(prefix='pon', length=12)
        self.kwargs.update({
            'description': 'ExampleDescription',
            'playReadyPath': '@' + _get_test_data_file('validPlayReadyTemplate.json'),
            'policyNameFromKeyToTrackMappings1': 'ckp',
            'policyNameFromKeyToTrackMappings2': 'demoPolicy15',
            'policyOptionName1': policy_option_name1,
            'policyOptionName2': policy_option_name2
        })
        self.cmd('az ams content-key-policy create -a {amsname} -n {policyNameFromKeyToTrackMappings1} -g {rg} --description {description} --open-restriction --play-ready-template "{playReadyPath}" --policy-option-name {policyOptionName1}', checks=[
            self.check('name', '{policyNameFromKeyToTrackMappings1}')
        ])
        self.cmd('az ams content-key-policy create -a {amsname} -n {policyNameFromKeyToTrackMappings2} -g {rg} --description {description} --open-restriction --clear-key-configuration --policy-option-name {policyOptionName2}', checks=[
            self.check('name', '{policyNameFromKeyToTrackMappings2}')
        ])
        streamingPolicyName = self.create_random_name(prefix='spn', length=10)
        self.kwargs.update({
            'streamingPolicyName': streamingPolicyName,
            'protocols': 'HLS SmoothStreaming',
            'clearTracks': '@' + _get_test_data_file('clearTracks.json'),
            'keyToTrackMappings': '@' + _get_test_data_file('keyToTrackMappings.json'),
            'label': 'label',
            'playReadyUrlTemplate': 'playReadyTemplate.foo.bar',
            'playReadyAttributes': 'awesomeAttributes'
        })
        self.cmd('az ams streaming-policy create -a {amsname} -n {streamingPolicyName} -g {rg} --cenc-protocols {protocols} --cenc-clear-tracks "{clearTracks}" --cenc-key-to-track-mappings "{keyToTrackMappings}" --cenc-default-key-label {label} --cenc-disable-widevine', checks=[
            self.check('name', '{streamingPolicyName}'),
            self.check('commonEncryptionCenc.enabledProtocols.hls', True),
            self.check('commonEncryptionCenc.enabledProtocols.smoothStreaming', True),
            self.check('commonEncryptionCenc.contentKeys.defaultKey.label', '{label}'),
            self.check('commonEncryptionCenc.drm.playReady.customLicenseAcquisitionUrlTemplate', None),
            self.check('commonEncryptionCenc.drm.playReady.playReadyCustomAttributes', None),
            self.check('commonEncryptionCenc.drm.widevine.customLicenseAcquisitionUrlTemplate', None),
        ])
@ResourceGroupPreparer()
@StorageAccountPreparer(parameter_name='storage_account_for_create')
def test_ams_streaming_policy_cbcs(self, resource_group, storage_account_for_create):
    """Create a streaming policy with CBCS (FairPlay) encryption and verify
    the enabled protocols, default key label and FairPlay DRM settings
    reported back by the service.
    """
    # AMS account names must be globally unique, so randomize.
    amsname = self.create_random_name(prefix='ams', length=12)
    self.kwargs.update({
        'amsname': amsname,
        'storageAccount': storage_account_for_create,
        'location': 'canadacentral'
    })
    # Provision the Media Services account the policy will belong to.
    self.cmd('az ams account create -n {amsname} -g {rg} --storage-account {storageAccount} -l {location}', checks=[
        self.check('name', '{amsname}'),
        self.check('location', 'Canada Central')
    ])
    streamingPolicyName = self.create_random_name(prefix='spn', length=10)
    self.kwargs.update({
        'streamingPolicyName': streamingPolicyName,
        'protocols': 'HLS SmoothStreaming Dash',
        'label': 'label',
        'urlTemplate': 'xyz.foo.bar',
    })
    # CBCS policy with an explicit FairPlay license-acquisition template and
    # persistent licenses allowed; every requested protocol must show as
    # enabled in the created resource.
    self.cmd('az ams streaming-policy create -a {amsname} -n {streamingPolicyName} -g {rg} --cbcs-protocols {protocols} --cbcs-fair-play-template {urlTemplate} --cbcs-default-key-label {label} --cbcs-fair-play-allow-persistent-license', checks=[
        self.check('name', '{streamingPolicyName}'),
        self.check('commonEncryptionCbcs.enabledProtocols.hls', True),
        self.check('commonEncryptionCbcs.enabledProtocols.smoothStreaming', True),
        self.check('commonEncryptionCbcs.enabledProtocols.dash', True),
        self.check('commonEncryptionCbcs.contentKeys.defaultKey.label', '{label}'),
        self.check('commonEncryptionCbcs.drm.fairPlay.customLicenseAcquisitionUrlTemplate', '{urlTemplate}'),
        self.check('commonEncryptionCbcs.drm.fairPlay.allowPersistentLicense', True),
    ])
@ResourceGroupPreparer()
@StorageAccountPreparer(parameter_name='storage_account_for_create')
def test_ams_streaming_policy_cbcs_default_drm(self, resource_group, storage_account_for_create):
    """Create a CBCS streaming policy WITHOUT any FairPlay arguments and
    verify the service falls back to DRM defaults (no license template,
    persistent licenses disallowed).
    """
    # AMS account names must be globally unique, so randomize.
    amsname = self.create_random_name(prefix='ams', length=12)
    self.kwargs.update({
        'amsname': amsname,
        'storageAccount': storage_account_for_create,
        'location': 'canadacentral'
    })
    # Provision the Media Services account the policy will belong to.
    self.cmd('az ams account create -n {amsname} -g {rg} --storage-account {storageAccount} -l {location}', checks=[
        self.check('name', '{amsname}'),
        self.check('location', 'Canada Central')
    ])
    streamingPolicyName = self.create_random_name(prefix='spn', length=10)
    self.kwargs.update({
        'streamingPolicyName': streamingPolicyName,
        'protocols': 'HLS SmoothStreaming Dash',
        'label': 'label',
        # Defined but intentionally NOT passed on the command line below,
        # so the service-side defaults are exercised.
        'urlTemplate': 'xyz.foo.bar',
    })
    # No --cbcs-fair-play-* flags: template should come back None and
    # allowPersistentLicense should default to False.
    self.cmd('az ams streaming-policy create -a {amsname} -n {streamingPolicyName} -g {rg} --cbcs-protocols {protocols} --cbcs-default-key-label {label} ', checks=[
        self.check('name', '{streamingPolicyName}'),
        self.check('commonEncryptionCbcs.enabledProtocols.hls', True),
        self.check('commonEncryptionCbcs.enabledProtocols.smoothStreaming', True),
        self.check('commonEncryptionCbcs.enabledProtocols.dash', True),
        self.check('commonEncryptionCbcs.contentKeys.defaultKey.label', '{label}'),
        self.check('commonEncryptionCbcs.drm.fairPlay.customLicenseAcquisitionUrlTemplate', None),
        self.check('commonEncryptionCbcs.drm.fairPlay.allowPersistentLicense', False),
    ])
| 53.240854
| 470
| 0.657848
| 1,583
| 17,463
| 7.11813
| 0.102337
| 0.051118
| 0.018371
| 0.024494
| 0.902467
| 0.894569
| 0.857561
| 0.849219
| 0.849219
| 0.849219
| 0
| 0.007019
| 0.200481
| 17,463
| 327
| 471
| 53.40367
| 0.800029
| 0.019241
| 0
| 0.8327
| 0
| 0.08365
| 0.486917
| 0.238407
| 0
| 0
| 0
| 0
| 0.003802
| 1
| 0.026616
| false
| 0
| 0.011407
| 0
| 0.041825
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1c48f24ae3c32c49052dd25f913598d1564702d8
| 154
|
py
|
Python
|
moztrap/view/users/context_processors.py
|
mbeko/moztrap
|
db75e1f8756ef2c0c39652a66302b19c8afa0256
|
[
"BSD-2-Clause"
] | null | null | null |
moztrap/view/users/context_processors.py
|
mbeko/moztrap
|
db75e1f8756ef2c0c39652a66302b19c8afa0256
|
[
"BSD-2-Clause"
] | null | null | null |
moztrap/view/users/context_processors.py
|
mbeko/moztrap
|
db75e1f8756ef2c0c39652a66302b19c8afa0256
|
[
"BSD-2-Clause"
] | null | null | null |
"""
Auth-related context processors.
"""
from django.conf import settings
def browserid(request):
    """Template context processor exposing the ``USE_BROWSERID`` setting.

    Returns a one-key context dict so templates can toggle
    BrowserID-specific markup.
    """
    return dict(USE_BROWSERID=settings.USE_BROWSERID)
| 15.4
| 52
| 0.746753
| 18
| 154
| 6.277778
| 0.777778
| 0.212389
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136364
| 154
| 9
| 53
| 17.111111
| 0.849624
| 0.207792
| 0
| 0
| 0
| 0
| 0.115044
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
1c85e1f0fcb2532fba16b10fa8fc6f928452b44e
| 306
|
py
|
Python
|
codes/models/archs/dcn/__init__.py
|
Johnson-yue/mmsr
|
3549100670c48828901f414a75d93dc822bb8eaa
|
[
"Apache-2.0"
] | 130
|
2020-09-21T04:20:41.000Z
|
2022-03-26T03:14:20.000Z
|
codes/models/archs/dcn/__init__.py
|
Johnson-yue/mmsr
|
3549100670c48828901f414a75d93dc822bb8eaa
|
[
"Apache-2.0"
] | 10
|
2020-10-11T21:25:27.000Z
|
2021-11-10T05:48:11.000Z
|
codes/models/archs/dcn/__init__.py
|
Johnson-yue/mmsr
|
3549100670c48828901f414a75d93dc822bb8eaa
|
[
"Apache-2.0"
] | 23
|
2020-10-01T06:11:14.000Z
|
2022-03-03T02:01:26.000Z
|
from .deform_conv import (DeformConv, DeformConvPack, ModulatedDeformConv, ModulatedDeformConvPack,
deform_conv, modulated_deform_conv)
# Public API of the dcn package: deformable-convolution layers and their
# functional counterparts, re-exported from .deform_conv.
__all__ = [
    'DeformConv',
    'DeformConvPack',
    'ModulatedDeformConv',
    'ModulatedDeformConvPack',
    'deform_conv',
    'modulated_deform_conv',
]
| 38.25
| 100
| 0.732026
| 23
| 306
| 9.26087
| 0.434783
| 0.234742
| 0.403756
| 0.619718
| 0.892019
| 0.892019
| 0.892019
| 0.892019
| 0.892019
| 0
| 0
| 0
| 0.176471
| 306
| 7
| 101
| 43.714286
| 0.845238
| 0
| 0
| 0
| 0
| 0
| 0.320261
| 0.143791
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.166667
| 0
| 0.166667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
1c9c9e87366aa1268e58eb2a728a3e331258fe83
| 149
|
py
|
Python
|
src/csbuilder/session/__init__.py
|
huykingsofm/csbuilder
|
c6ba6f0dd3fd2a0d03c7492de20a7107cb1b9191
|
[
"MIT"
] | null | null | null |
src/csbuilder/session/__init__.py
|
huykingsofm/csbuilder
|
c6ba6f0dd3fd2a0d03c7492de20a7107cb1b9191
|
[
"MIT"
] | null | null | null |
src/csbuilder/session/__init__.py
|
huykingsofm/csbuilder
|
c6ba6f0dd3fd2a0d03c7492de20a7107cb1b9191
|
[
"MIT"
] | null | null | null |
from csbuilder.session.session import Session
from csbuilder.session.manager import SessionManager
from csbuilder.session.result import SessionResult
| 49.666667
| 52
| 0.885906
| 18
| 149
| 7.333333
| 0.444444
| 0.295455
| 0.454545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.073826
| 149
| 3
| 53
| 49.666667
| 0.956522
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
98e84188d060ab65c7151f6f1123b3322e1a054d
| 4,032
|
py
|
Python
|
source/methods_feature_extract.py
|
Deadshot20/Sentence-Simplification-ACL14
|
5571af38073ca856ca5800748b42e0cdb87c86f7
|
[
"BSD-3-Clause"
] | 47
|
2016-05-03T16:00:03.000Z
|
2022-01-04T14:15:25.000Z
|
source/methods_feature_extract.py
|
Deadshot20/Sentence-Simplification-ACL14
|
5571af38073ca856ca5800748b42e0cdb87c86f7
|
[
"BSD-3-Clause"
] | 10
|
2017-04-30T18:50:54.000Z
|
2020-11-03T16:54:26.000Z
|
source/methods_feature_extract.py
|
shashiongithub/Sentence-Simplification-ACL14
|
5571af38073ca856ca5800748b42e0cdb87c86f7
|
[
"BSD-3-Clause"
] | 12
|
2016-07-22T09:58:12.000Z
|
2020-07-21T12:56:27.000Z
|
#!/usr/bin/env python
#===================================================================================
#description : Methods for features exploration =
#author : Shashi Narayan, shashi.narayan(at){ed.ac.uk,loria.fr,gmail.com})=
#date : Created in 2014, Later revised in April 2016. =
#version : 0.1 =
#===================================================================================
class Feature_Nov27:
    """Feature extractors (Nov-27 variant).

    Differs from ``Feature_Init`` in that split features omit the
    length-difference component and relation features bucket the span
    length instead of using it verbatim.
    """

    def get_split_feature(self, split_tuple, parent_sentence, children_sentence_list, boxer_graph):
        """Feature for a split candidate: just its boxer-graph pattern."""
        # iLength was dropped from this variant; only the pattern is kept.
        return boxer_graph.get_pattern_4_split_candidate(split_tuple)

    def get_drop_ood_feature(self, ood_node, nodeset, main_sent_dict, boxer_graph):
        """Feature for dropping an out-of-discourse node: the word plus a
        flag saying whether it sits on the boundary of the nodeset span."""
        word = boxer_graph.extract_oodword(ood_node, main_sent_dict)
        # Each OOD node carries exactly one position.
        position = boxer_graph.nodes[ood_node]["positions"][0]
        span = boxer_graph.extract_span_min_max(nodeset)
        flag = "true" if (position <= span[0] or position >= span[1]) else "false"
        return word + "_" + flag

    def get_drop_rel_feature(self, rel_node, nodeset, main_sent_dict, boxer_graph):
        """Feature for dropping a relation node: predicate name plus a
        bucketed span-length label."""
        predicate = boxer_graph.relations[rel_node]["predicates"]
        span_len = len(boxer_graph.extract_span_for_nodeset_with_rel(rel_node, nodeset))
        for limit, bucket in ((2, "0-2"), (5, "2-5"), (10, "5-10"), (15, "10-15")):
            if span_len <= limit:
                return predicate + "_" + bucket
        return predicate + "_" + "gt15"

    def get_drop_mod_feature(self, mod_cand, main_sent_dict, boxer_graph):
        """Feature for dropping a modifier: the modifier word itself."""
        position = int(mod_cand[0])
        return main_sent_dict[position][0]
class Feature_Init:
    """Feature extractors (initial variant).

    Split features append the length difference between parent and
    children sentences; relation features use the raw span length.
    """

    def get_split_feature(self, split_tuple, parent_sentence, children_sentence_list, boxer_graph):
        """Feature for a split candidate: pattern plus length difference."""
        length_diff = boxer_graph.calculate_iLength(parent_sentence, children_sentence_list)
        pattern = boxer_graph.get_pattern_4_split_candidate(split_tuple)
        return "%s_%s" % (pattern, length_diff)

    def get_drop_ood_feature(self, ood_node, nodeset, main_sent_dict, boxer_graph):
        """Feature for dropping an out-of-discourse node: the word plus a
        flag saying whether it sits on the boundary of the nodeset span."""
        word = boxer_graph.extract_oodword(ood_node, main_sent_dict)
        # Each OOD node carries exactly one position.
        position = boxer_graph.nodes[ood_node]["positions"][0]
        span = boxer_graph.extract_span_min_max(nodeset)
        flag = "true" if (position <= span[0] or position >= span[1]) else "false"
        return word + "_" + flag

    def get_drop_rel_feature(self, rel_node, nodeset, main_sent_dict, boxer_graph):
        """Feature for dropping a relation node: predicate plus exact span length."""
        predicate = boxer_graph.relations[rel_node]["predicates"]
        span = boxer_graph.extract_span_for_nodeset_with_rel(rel_node, nodeset)
        return "%s_%d" % (predicate, len(span))

    def get_drop_mod_feature(self, mod_cand, main_sent_dict, boxer_graph):
        """Feature for dropping a modifier: the modifier word itself."""
        return main_sent_dict[int(mod_cand[0])][0]
| 46.344828
| 120
| 0.630208
| 505
| 4,032
| 4.617822
| 0.188119
| 0.09434
| 0.066038
| 0.043739
| 0.837907
| 0.837907
| 0.837907
| 0.837907
| 0.837907
| 0.837907
| 0
| 0.014847
| 0.248264
| 4,032
| 86
| 121
| 46.883721
| 0.754536
| 0.214782
| 0
| 0.711864
| 0
| 0
| 0.025413
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.135593
| false
| 0
| 0
| 0
| 0.305085
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c7738171254cc98bf389f52f8eeef741c9f8a41e
| 3,455
|
py
|
Python
|
tests/test_models/test_cait.py
|
DarshanDeshpande/jax-models
|
ae5750540f142572ff7f276b927a9cdb5195fd23
|
[
"Apache-2.0"
] | 61
|
2022-01-08T19:06:48.000Z
|
2022-03-28T07:56:19.000Z
|
tests/test_models/test_cait.py
|
DarshanDeshpande/jax-models
|
ae5750540f142572ff7f276b927a9cdb5195fd23
|
[
"Apache-2.0"
] | 1
|
2022-02-27T01:15:57.000Z
|
2022-02-28T13:31:50.000Z
|
tests/test_models/test_cait.py
|
DarshanDeshpande/jax-models
|
ae5750540f142572ff7f276b927a9cdb5195fd23
|
[
"Apache-2.0"
] | 2
|
2022-01-09T10:01:49.000Z
|
2022-02-03T23:19:24.000Z
|
import unittest
import jax.numpy as jnp
import jax.random as random
from jax_models.models.cait import *
class TestmodelTransformer(unittest.TestCase):
    """Output-shape and pretrained-weight smoke tests for the CaiT model."""

    def _check_output_shape(self, attach_head, expected_shape):
        # Shared driver for the two shape tests: init with three independent
        # RNG streams, run one deterministic (train=False) forward pass and
        # compare the output shape.
        rng1, rng2, rng3 = random.split(random.PRNGKey(0), 3)
        model = CaiT(attach_head=attach_head)
        x = jnp.zeros([1, 224, 224, 3])
        params = model.init(
            {"params": rng1, "dropout": rng2, "drop_path": rng3}, x, False
        )["params"]
        out = model.apply(
            {"params": params},
            x,
            False,
            rngs={"dropout": rng2, "drop_path": rng3},
        )
        self.assertEqual(out.shape, expected_shape)

    def test_headless_output_shape(self):
        # Without a classification head the model returns the 768-dim embedding.
        self._check_output_shape(False, (1, 768))

    def test_head_output_shape(self):
        # With the head attached the model returns 1000 class logits.
        self._check_output_shape(True, (1, 1000))

    def test_pretrained_weights(self):
        # Each pretrained variant is paired with its expected input
        # resolution; running them in a loop removes ten copy-pasted
        # blocks and lets subTest report which variant failed.
        configs = [
            (cait_xxs24_224, 224),
            (cait_xxs24_384, 384),
            (cait_xxs36_224, 224),
            (cait_xxs36_384, 384),
            (cait_xs24_384, 384),
            (cait_s24_224, 224),
            (cait_s24_384, 384),
            (cait_s36_384, 384),
            (cait_m36_384, 384),
            (cait_m48_448, 448),
        ]
        for ctor, size in configs:
            with self.subTest(model=ctor.__name__):
                model, params = ctor(
                    pretrained=True, download_dir="weights/cait_weights/"
                )
                # Forward pass only; success == no exception raised.
                model.apply({"params": params}, jnp.zeros([1, size, size, 3]), True)
| 25.977444
| 74
| 0.48712
| 346
| 3,455
| 4.705202
| 0.176301
| 0.07371
| 0.117936
| 0.162162
| 0.830467
| 0.820639
| 0.820639
| 0.810197
| 0.810197
| 0.810197
| 0
| 0.056613
| 0.391606
| 3,455
| 132
| 75
| 26.174242
| 0.717888
| 0
| 0
| 0.598291
| 0
| 0
| 0.107091
| 0.060781
| 0
| 0
| 0
| 0
| 0.017094
| 1
| 0.025641
| false
| 0
| 0.034188
| 0
| 0.068376
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c7ee9169404e6c4fb77d49dc16cd6aa8fa25ad99
| 361
|
py
|
Python
|
odk_logger/tests/__init__.py
|
Ecotrust/formhub
|
05033bb5aa152cc2cbcd7382c2c999d82b2c3276
|
[
"BSD-2-Clause"
] | 123
|
2015-01-08T09:21:05.000Z
|
2021-11-14T19:45:23.000Z
|
odk_logger/tests/__init__.py
|
Ecotrust/formhub
|
05033bb5aa152cc2cbcd7382c2c999d82b2c3276
|
[
"BSD-2-Clause"
] | 16
|
2015-02-13T16:56:42.000Z
|
2021-02-20T23:58:43.000Z
|
odk_logger/tests/__init__.py
|
Ecotrust/formhub
|
05033bb5aa152cc2cbcd7382c2c999d82b2c3276
|
[
"BSD-2-Clause"
] | 110
|
2015-01-19T14:34:06.000Z
|
2021-02-01T14:55:11.000Z
|
from parsing_tests import *
#from instance_creation_test import *
#from test_simple_submission import *
#from test_import_tools import *
#from test_form_submission import *
#from test_update_xform_uuid import *
#from test_command_syncd_deleted_instances_fix import *
#from test_webforms import *
#from test_publish_xls import *
#from test_backup_tools import *
| 32.818182
| 55
| 0.836565
| 52
| 361
| 5.384615
| 0.442308
| 0.321429
| 0.4
| 0.171429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.110803
| 361
| 10
| 56
| 36.1
| 0.872274
| 0.872576
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
404a49b52acf746f4c6785be489461c99f634499
| 6,314
|
py
|
Python
|
model.py
|
zpreisler/denoising
|
cac5b4894759f9f3645577c4e6057a235fca906b
|
[
"MIT"
] | null | null | null |
model.py
|
zpreisler/denoising
|
cac5b4894759f9f3645577c4e6057a235fca906b
|
[
"MIT"
] | null | null | null |
model.py
|
zpreisler/denoising
|
cac5b4894759f9f3645577c4e6057a235fca906b
|
[
"MIT"
] | null | null | null |
import torch
from torch import nn,optim
from torch.utils.data import Dataset,DataLoader
class UNet(nn.Module):
    """1-D U-Net-style denoiser built from valid (padding=0) convolutions.

    NOTE(review): the ``print`` calls in ``forward`` look like debugging
    traces; ``self.conv2`` is never used in ``forward``.
    """
    def __init__(self, kernel_size = 3):
        super().__init__()
        # Stem: 1 input channel -> 128 feature channels.
        self.conv_in = nn.Conv1d(in_channels=1, out_channels=128, padding=0, kernel_size=kernel_size,bias=True,stride=1)
        # Reused 128->128 conv applied at every encoder level.
        self.conv0 = nn.Conv1d(in_channels=128, out_channels=128, padding=0, kernel_size=kernel_size,bias=True,stride=1)
        # 256->256 conv for the decoder path after the channel-doubling concat.
        self.conv1 = nn.Conv1d(in_channels=256, out_channels=256, padding=0, kernel_size=kernel_size,bias=True,stride=1)
        # Unused in forward() — presumably left over from an earlier design; TODO confirm.
        self.conv2 = nn.Conv1d(in_channels=192, out_channels=192, padding=0, kernel_size=kernel_size,bias=True,stride=1)
        # Head: collapse 256 channels back to a single output channel.
        self.conv_out = nn.ConvTranspose1d(in_channels=256, out_channels=1, padding=0, kernel_size=kernel_size,bias=True,stride=1)
        # Downsampling step: nonlinearity then halve the length.
        self.down = nn.Sequential(
            nn.ELU(),
            nn.MaxPool1d(kernel_size=2)
        )
        # Nonlinearity only (no resampling).
        self.center = nn.Sequential(
            nn.ELU()
        )
        # Upsampling step: nonlinearity then double the length.
        self.up = nn.Sequential(
            nn.ELU(),
            nn.Upsample(scale_factor=2)
        )
    def forward(self,x):
        """Run the encoder/decoder; expects input of shape (batch, 1, length).

        The trailing crop of 227 samples per side assumes a fixed input
        length — TODO confirm against the training pipeline.
        """
        # ---- Encoder: three conv+downsample levels, keeping skips l1..l3.
        x = self.conv_in(x)
        l1 = self.center(x)
        print('',x.shape)
        x = self.conv0(l1)
        l2 = self.down(x)
        print('',x.shape)
        x = self.conv0(l2)
        l3 = self.down(x)
        print('',x.shape)
        x = self.conv0(l3)
        x = self.down(x)
        print('',x.shape)
        # ---- Bottleneck.
        x = self.conv0(x)
        x = self.center(x)
        print('',x.shape)
        # ---- Decoder: only the l3 skip is actually concatenated; the l2/l1
        # concats below are commented out, so channel count stays at 256.
        x = self.conv0(x)
        x = self.up(x)
        # l3 is cropped 5 samples per side — presumably to match the length
        # lost to the valid convolutions; TODO confirm.
        x = torch.cat((x,l3[:,:,5:-5]),1)
        print('',x.shape)
        x = self.conv1(x)
        x = self.up(x)
        #x = torch.cat((x,l2[:,:,13:-13]),1)
        print('',x.shape)
        x = self.conv1(x)
        x = self.up(x)
        #x = torch.cat((x,l1[:,:,29:-29]),1)
        #print('',x.shape)
        x = self.conv_out(x)
        print('',x.shape)
        # Crop a fixed margin so the output lines up with the target signal.
        x = x[:,:,227:-227]
        print('crop',x.shape)
        return x
class UNet2(nn.Module):
    """Variant of ``UNet`` that appends a Linear layer over the last axis.

    NOTE(review): identical to ``UNet`` except for the trailing
    ``nn.Linear(2048, 2048)``, which fixes the cropped output length to
    exactly 2048 samples — TODO confirm the expected input length.
    """
    def __init__(self, kernel_size = 3):
        super().__init__()
        # Stem: 1 input channel -> 128 feature channels.
        self.conv_in = nn.Conv1d(in_channels=1, out_channels=128, padding=0, kernel_size=kernel_size,bias=True,stride=1)
        # Reused 128->128 conv applied at every encoder level.
        self.conv0 = nn.Conv1d(in_channels=128, out_channels=128, padding=0, kernel_size=kernel_size,bias=True,stride=1)
        # 256->256 conv for the decoder path after the channel-doubling concat.
        self.conv1 = nn.Conv1d(in_channels=256, out_channels=256, padding=0, kernel_size=kernel_size,bias=True,stride=1)
        # Unused in forward() — presumably left over from an earlier design; TODO confirm.
        self.conv2 = nn.Conv1d(in_channels=192, out_channels=192, padding=0, kernel_size=kernel_size,bias=True,stride=1)
        # Head: collapse 256 channels back to a single output channel.
        self.conv_out = nn.ConvTranspose1d(in_channels=256, out_channels=1, padding=0, kernel_size=kernel_size,bias=True,stride=1)
        # Downsampling step: nonlinearity then halve the length.
        self.down = nn.Sequential(
            nn.ELU(),
            nn.MaxPool1d(kernel_size=2)
        )
        # Nonlinearity only (no resampling).
        self.center = nn.Sequential(
            nn.ELU()
        )
        # Upsampling step: nonlinearity then double the length.
        self.up = nn.Sequential(
            nn.ELU(),
            nn.Upsample(scale_factor=2)
        )
        # Final per-position mixing over the (fixed) 2048-sample output.
        self.linear = nn.Linear(2048,2048)
    def forward(self,x):
        """Run the encoder/decoder plus the final Linear layer.

        Expects (batch, 1, length) input; the print calls look like
        debugging traces.
        """
        # ---- Encoder: three conv+downsample levels, keeping skips l1..l3.
        x = self.conv_in(x)
        l1 = self.center(x)
        print('',x.shape)
        x = self.conv0(l1)
        l2 = self.down(x)
        print('',x.shape)
        x = self.conv0(l2)
        l3 = self.down(x)
        print('',x.shape)
        x = self.conv0(l3)
        x = self.down(x)
        print('',x.shape)
        # ---- Bottleneck.
        x = self.conv0(x)
        x = self.center(x)
        print('',x.shape)
        # ---- Decoder: only the l3 skip is actually concatenated; the l2/l1
        # concats below are commented out, so channel count stays at 256.
        x = self.conv0(x)
        x = self.up(x)
        x = torch.cat((x,l3[:,:,5:-5]),1)
        print('',x.shape)
        x = self.conv1(x)
        x = self.up(x)
        #x = torch.cat((x,l2[:,:,13:-13]),1)
        print('',x.shape)
        x = self.conv1(x)
        x = self.up(x)
        #x = torch.cat((x,l1[:,:,29:-29]),1)
        #print('',x.shape)
        x = self.conv_out(x)
        print('',x.shape)
        # Crop a fixed margin so the output lines up with the target signal.
        x = x[:,:,227:-227]
        print('crop',x.shape)
        # Linear over the last dimension; requires the cropped length == 2048.
        x = self.linear(x)
        return x
class Encode(nn.Module):
    """Convolutional encoder: valid 1-D convs with two pooling stages.

    Maps a (batch, 1, length) signal to (batch, channels, length') where
    the length shrinks via padding-free convolutions and two 2x max-pools.
    """

    def __init__(self, channels = 128, kernel_size = 3):
        super().__init__()
        stages = [
            # Stem: lift the single input channel to `channels` features.
            nn.Conv1d(in_channels=1, out_channels=channels, padding=0,
                      kernel_size=kernel_size, bias=True, stride=1),
            nn.ELU(),
            nn.MaxPool1d(kernel_size=2),
            # Second conv + pool stage.
            nn.Conv1d(in_channels=channels, out_channels=channels, padding=0,
                      kernel_size=kernel_size, bias=True, stride=1),
            nn.ELU(),
            nn.MaxPool1d(kernel_size=2),
            # Final conv without pooling.
            nn.Conv1d(in_channels=channels, out_channels=channels, padding=0,
                      kernel_size=kernel_size, bias=True, stride=1),
            nn.ELU(),
        ]
        self.encode = nn.Sequential(*stages)

    def forward(self, x):
        """Encode the input signal through the sequential stack."""
        return self.encode(x)
class Decode(nn.Module):
    """Transposed-convolution decoder mirroring ``Encode``.

    Maps a (batch, channels, length) feature map back to a single-channel
    signal, doubling the length twice via Upsample stages.
    """

    def __init__(self, channels = 128, kernel_size = 3):
        super().__init__()
        stages = [
            # First deconv + upsample stage.
            nn.ConvTranspose1d(in_channels=channels, out_channels=channels, padding=0,
                               kernel_size=kernel_size, bias=True, stride=1),
            nn.ELU(),
            nn.Upsample(scale_factor=2),
            # Second deconv + upsample stage (stride defaults to 1).
            nn.ConvTranspose1d(in_channels=channels, out_channels=channels, padding=0,
                               kernel_size=kernel_size, bias=True),
            nn.ELU(),
            nn.Upsample(scale_factor=2),
            # Head: collapse to one output channel.
            nn.ConvTranspose1d(in_channels=channels, out_channels=1, padding=0,
                               kernel_size=kernel_size, bias=True, stride=1),
        ]
        self.decode = nn.Sequential(*stages)

    def forward(self, x):
        """Decode the feature map through the sequential stack."""
        return self.decode(x)
class Denoise(nn.Module):
    """Encoder-decoder denoiser; crops 255 samples off each end of the output."""

    def __init__(self):
        super().__init__()
        # Default-configuration encoder/decoder pair (128 channels, k=3).
        self.encode = Encode()
        self.decode = Decode()

    def forward(self, x):
        """Denoise a (batch, 1, length) signal; output is cropped by 255/side."""
        features = self.encode(x)
        restored = self.decode(features)
        # Fixed-margin crop to align the reconstruction with the target.
        restored = restored[:, :, 255:-255]
        print('Denoise shape:', restored.shape)
        return restored
| 29.231481
| 135
| 0.557175
| 860
| 6,314
| 3.934884
| 0.081395
| 0.132979
| 0.039303
| 0.095745
| 0.906028
| 0.888593
| 0.867908
| 0.862293
| 0.862293
| 0.862293
| 0
| 0.051512
| 0.292841
| 6,314
| 215
| 136
| 29.367442
| 0.706383
| 0.081248
| 0
| 0.769231
| 0
| 0
| 0.003802
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.06993
| false
| 0
| 0.020979
| 0.013986
| 0.160839
| 0.132867
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4077e1901622eaa6af1e5a3d03d5118a5d5b5a66
| 1,764
|
py
|
Python
|
TestMatrixCompletion.py
|
mharradon/LowRankPropagation
|
a0e03c2f24b71e02bb207da5e77a6428834cc7f7
|
[
"MIT"
] | 43
|
2016-04-26T15:54:34.000Z
|
2021-07-25T15:03:21.000Z
|
TestMatrixCompletion.py
|
mharradon/LowRankPropagation
|
a0e03c2f24b71e02bb207da5e77a6428834cc7f7
|
[
"MIT"
] | null | null | null |
TestMatrixCompletion.py
|
mharradon/LowRankPropagation
|
a0e03c2f24b71e02bb207da5e77a6428834cc7f7
|
[
"MIT"
] | 7
|
2016-04-26T18:36:01.000Z
|
2017-03-24T02:12:19.000Z
|
import Rank1MatrixCompletion as mc
import numpy as np
import matplotlib.pyplot as plt
# Random Sampling
# Make an NxN rank-1 matrix
N = 100
sampleRate = 0.1
# Outer product of two Gaussian vectors => exactly rank 1.
A = np.dot(np.random.randn(N,1),np.random.randn(1,N))
# Make mask with random sampling
mask = np.less(np.random.rand(N,N),sampleRate)
# Zero out matrix where mask is 0
A[np.logical_not(mask)] = 0
# Show the sampled matrix next to its observation mask.
f, (ax1, ax2) = plt.subplots(2, sharex=True, sharey=True)
ax1.imshow(A, interpolation="nearest")
ax2.imshow(mask, interpolation="nearest")
plt.show()
# Run rank-1 completion; copies protect A/mask from in-place modification.
[AOut,maskOut] = mc.completeRank1Matrix(np.copy(A),np.copy(mask),True)
# 2x2 grid: input vs completed matrix (top), input vs final mask (bottom).
f, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2,2, sharex=True, sharey=True)
ax1.imshow(A, interpolation="nearest", vmin=-3, vmax=3)
ax3.imshow(mask, interpolation="nearest", vmin=-3, vmax=3)
ax2.imshow(AOut, interpolation="nearest", vmin=-3, vmax=3)
ax4.imshow(maskOut, interpolation="nearest", vmin=-3, vmax=3)
plt.show()
# Banded Diagonal Sampling
# Make an NxN rank-1 matrix
N = 50
sampleRate = 0.1
A = np.dot(np.random.randn(N,1),np.random.randn(1,N))
# Make a banded-diagonal mask: main diagonal plus first superdiagonal (k=1).
mask = np.logical_or(np.diag(np.ones(N,dtype=bool)),np.diag(np.ones(N-1,dtype=bool),k=1))
# Zero out matrix where mask is 0
A[np.logical_not(mask)] = 0
# Show the sampled matrix next to its observation mask.
f, (ax1, ax2) = plt.subplots(2, sharex=True, sharey=True)
ax1.imshow(A, interpolation="nearest")
ax2.imshow(mask, interpolation="nearest")
plt.show()
# Repeat the completion on the banded-diagonal observation pattern.
[AOut,maskOut] = mc.completeRank1Matrix(np.copy(A),np.copy(mask),True)
f, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2,2, sharex=True, sharey=True)
ax1.imshow(A, interpolation="nearest", vmin=-3, vmax=3)
ax3.imshow(mask, interpolation="nearest", vmin=-3, vmax=3)
ax2.imshow(AOut, interpolation="nearest", vmin=-3, vmax=3)
ax4.imshow(maskOut, interpolation="nearest", vmin=-3, vmax=3)
plt.show()
| 33.283019
| 89
| 0.715986
| 300
| 1,764
| 4.2
| 0.213333
| 0.190476
| 0.152381
| 0.15873
| 0.87619
| 0.855556
| 0.855556
| 0.855556
| 0.809524
| 0.809524
| 0
| 0.044192
| 0.102041
| 1,764
| 52
| 90
| 33.923077
| 0.751263
| 0.123583
| 0
| 0.8
| 0
| 0
| 0.054652
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.085714
| 0
| 0.085714
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
409fc52d1935bfa1b17de4d0ebb37fd4320a1fcd
| 17,481
|
py
|
Python
|
com/vmware/appliance/access_client.py
|
vishal-12/vsphere-automation-sdk-python
|
9cf363971db77ea5a12928eecd5cf5170a7fcd8a
|
[
"MIT"
] | null | null | null |
com/vmware/appliance/access_client.py
|
vishal-12/vsphere-automation-sdk-python
|
9cf363971db77ea5a12928eecd5cf5170a7fcd8a
|
[
"MIT"
] | null | null | null |
com/vmware/appliance/access_client.py
|
vishal-12/vsphere-automation-sdk-python
|
9cf363971db77ea5a12928eecd5cf5170a7fcd8a
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
#---------------------------------------------------------------------------
# Copyright 2019 VMware, Inc. All rights reserved.
# AUTO GENERATED FILE -- DO NOT MODIFY!
#
# vAPI stub file for package com.vmware.appliance.access.
#---------------------------------------------------------------------------
"""
The ``com.vmware.appliance.access_client`` module provides classes for managing
access to the appliance. The module is available starting in vSphere 6.5.
"""
__author__ = 'VMware, Inc.'
__docformat__ = 'restructuredtext en'
import sys
from vmware.vapi.bindings import type
from vmware.vapi.bindings.converter import TypeConverter
from vmware.vapi.bindings.enum import Enum
from vmware.vapi.bindings.error import VapiError
from vmware.vapi.bindings.struct import VapiStruct
from vmware.vapi.bindings.stub import (
ApiInterfaceStub, StubFactoryBase, VapiInterface)
from vmware.vapi.bindings.common import raise_core_exception
from vmware.vapi.data.validator import (UnionValidator, HasFieldsOfValidator)
from vmware.vapi.exception import CoreException
from vmware.vapi.lib.constants import TaskType
from vmware.vapi.lib.rest import OperationRestMetadata
# NOTE(review): auto-generated vAPI stub (see file header "DO NOT MODIFY");
# only comments added here.
class Consolecli(VapiInterface):
    """
    ``Consolecli`` class provides methods Get/Set enabled state of CLI.
    """

    _VAPI_SERVICE_ID = 'com.vmware.appliance.access.consolecli'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        :type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        # Bind this interface to its generated REST stub class.
        VapiInterface.__init__(self, config, _ConsolecliStub)

    def set(self,
            enabled,
            ):
        """
        Set enabled state of the console-based controlled CLI (TTY1).
        :type enabled: :class:`bool`
        :param enabled: Console-based controlled CLI is enabled.
        :raise: :class:`com.vmware.vapi.std.errors_client.Error`
            Generic error
        """
        # Dispatch through the stub's operation table ('set' -> HTTP PUT).
        return self._invoke('set',
                            {
                            'enabled': enabled,
                            })

    def get(self):
        """
        Get enabled state of the console-based controlled CLI (TTY1).
        :rtype: :class:`bool`
        :return: Console-based controlled CLI is enabled.
        :raise: :class:`com.vmware.vapi.std.errors_client.Error`
            Generic error
        """
        # Parameterless operation ('get' -> HTTP GET).
        return self._invoke('get', None)
# NOTE(review): auto-generated vAPI stub; only comments added here.
class Dcui(VapiInterface):
    """
    ``Dcui`` class provides methods Get/Set enabled state of DCUI.
    """

    _VAPI_SERVICE_ID = 'com.vmware.appliance.access.dcui'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        :type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        # Bind this interface to its generated REST stub class.
        VapiInterface.__init__(self, config, _DcuiStub)

    def set(self,
            enabled,
            ):
        """
        Set enabled state of Direct Console User Interface (DCUI TTY2).
        :type enabled: :class:`bool`
        :param enabled: DCUI is enabled.
        :raise: :class:`com.vmware.vapi.std.errors_client.Error`
            Generic error
        """
        # Dispatch through the stub's operation table.
        return self._invoke('set',
                            {
                            'enabled': enabled,
                            })

    def get(self):
        """
        Get enabled state of Direct Console User Interface (DCUI TTY2).
        :rtype: :class:`bool`
        :return: DCUI is enabled.
        :raise: :class:`com.vmware.vapi.std.errors_client.Error`
            Generic error
        """
        # Parameterless operation.
        return self._invoke('get', None)
# NOTE(review): auto-generated vAPI stub; only comments added here.
class Shell(VapiInterface):
    """
    ``Shell`` class provides methods Get/Set enabled state of BASH.
    """

    _VAPI_SERVICE_ID = 'com.vmware.appliance.access.shell'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        :type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        # Bind this interface to its generated REST stub class.
        VapiInterface.__init__(self, config, _ShellStub)

    class ShellConfig(VapiStruct):
        """
        ``Shell.ShellConfig`` class Structure that defines shell configuration.
        .. tip::
            The arguments are used to initialize data attributes with the same
            names.
        """
        def __init__(self,
                     enabled=None,
                     timeout=None,
                     ):
            """
            :type enabled: :class:`bool`
            :param enabled: Enabled can be set to true or false
            :type timeout: :class:`long`
            :param timeout: The timeout (in seconds) specifies how long you enable the Shell
                access. The maximum timeout is 86400 seconds(1 day).
            """
            self.enabled = enabled
            self.timeout = timeout
            VapiStruct.__init__(self)

    # Attach the VMODL binding metadata to ShellConfig; this executes once,
    # at class-definition time.
    ShellConfig._set_binding_type(type.StructType(
        'com.vmware.appliance.access.shell.shell_config', {
            'enabled': type.BooleanType(),
            'timeout': type.IntegerType(),
        },
        ShellConfig,
        False,
        None))

    def set(self,
            config,
            ):
        """
        Set enabled state of BASH, that is, access to BASH from within the
        controlled CLI.
        :type config: :class:`Shell.ShellConfig`
        :param config: Shell configuration
        :raise: :class:`com.vmware.vapi.std.errors_client.Error`
            Generic error
        """
        # Dispatch through the stub's operation table.
        return self._invoke('set',
                            {
                            'config': config,
                            })

    def get(self):
        """
        Get enabled state of BASH, that is, access to BASH from within the
        controlled CLI.
        :rtype: :class:`Shell.ShellConfig`
        :return: Current shell configuration.
        :raise: :class:`com.vmware.vapi.std.errors_client.Error`
            Generic error
        """
        # Parameterless operation.
        return self._invoke('get', None)
# NOTE(review): auto-generated vAPI stub; only comments added here.
class Ssh(VapiInterface):
    """
    ``Ssh`` class provides methods Get/Set enabled state of SSH-based
    controlled CLI.
    """

    _VAPI_SERVICE_ID = 'com.vmware.appliance.access.ssh'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        :type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        # Bind this interface to its generated REST stub class.
        VapiInterface.__init__(self, config, _SshStub)

    def set(self,
            enabled,
            ):
        """
        Set enabled state of the SSH-based controlled CLI.
        :type enabled: :class:`bool`
        :param enabled: SSH-based controlled CLI is enabled.
        :raise: :class:`com.vmware.vapi.std.errors_client.Error`
            Generic error
        """
        # Dispatch through the stub's operation table.
        return self._invoke('set',
                            {
                            'enabled': enabled,
                            })

    def get(self):
        """
        Get enabled state of the SSH-based controlled CLI.
        :rtype: :class:`bool`
        :return: SSH-based controlled CLI is enabled.
        :raise: :class:`com.vmware.vapi.std.errors_client.Error`
            Generic error
        """
        # Parameterless operation.
        return self._invoke('get', None)
class _ConsolecliStub(ApiInterfaceStub):
    # Auto-generated REST stub backing the Consolecli interface: declares the
    # input/output types, error mappings and REST metadata for 'set'/'get'.
    def __init__(self, config):
        """Register the consolecli operations with the base stub.

        :param config: stub configuration forwarded to ApiInterfaceStub.
        """
        # properties for set operation
        set_input_type = type.StructType('operation-input', {
            'enabled': type.BooleanType(),
        })
        set_error_dict = {
            'com.vmware.vapi.std.errors.error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
        }
        set_input_value_validator_list = [
        ]
        set_output_validator_list = [
        ]
        # 'set' maps to a PUT on the fixed consolecli endpoint (no path or
        # query parameters).
        set_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template='/appliance/access/consolecli',
            path_variables={
            },
            query_parameters={
            }
        )
        # properties for get operation
        get_input_type = type.StructType('operation-input', {})
        get_error_dict = {
            'com.vmware.vapi.std.errors.error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
        }
        get_input_value_validator_list = [
        ]
        get_output_validator_list = [
        ]
        # 'get' maps to a GET on the same endpoint.
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/appliance/access/consolecli',
            path_variables={
            },
            query_parameters={
            }
        )
        # Operation table consumed by ApiInterfaceStub: 'set' returns void,
        # 'get' returns a boolean; neither is a long-running task.
        operations = {
            'set': {
                'input_type': set_input_type,
                'output_type': type.VoidType(),
                'errors': set_error_dict,
                'input_value_validator_list': set_input_value_validator_list,
                'output_validator_list': set_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'get': {
                'input_type': get_input_type,
                'output_type': type.BooleanType(),
                'errors': get_error_dict,
                'input_value_validator_list': get_input_value_validator_list,
                'output_validator_list': get_output_validator_list,
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'set': set_rest_metadata,
            'get': get_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.appliance.access.consolecli',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=True)
class _DcuiStub(ApiInterfaceStub):
    """REST stub for the com.vmware.appliance.access.dcui service."""

    def __init__(self, config):
        # 'set' operation: one boolean body parameter, PUT to the endpoint.
        set_input = type.StructType('operation-input', {
            'enabled': type.BooleanType(),
        })
        set_errors = {
            'com.vmware.vapi.std.errors.error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
        }
        set_rest = OperationRestMetadata(
            http_method='PUT',
            url_template='/appliance/access/dcui',
            path_variables={},
            query_parameters={},
        )

        # 'get' operation: no input, GET from the same endpoint.
        get_input = type.StructType('operation-input', {})
        get_errors = {
            'com.vmware.vapi.std.errors.error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
        }
        get_rest = OperationRestMetadata(
            http_method='GET',
            url_template='/appliance/access/dcui',
            path_variables={},
            query_parameters={},
        )

        operations = {
            'set': {
                'input_type': set_input,
                'output_type': type.VoidType(),
                'errors': set_errors,
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
            'get': {
                'input_type': get_input,
                'output_type': type.BooleanType(),
                'errors': get_errors,
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.appliance.access.dcui',
            config=config, operations=operations,
            rest_metadata={'set': set_rest, 'get': get_rest},
            is_vapi_rest=True)
class _ShellStub(ApiInterfaceStub):
    """REST stub for the com.vmware.appliance.access.shell service."""

    def __init__(self, config):
        # 'set' operation: one Shell.ShellConfig body parameter, PUT.
        set_input = type.StructType('operation-input', {
            'config': type.ReferenceType(__name__, 'Shell.ShellConfig'),
        })
        set_errors = {
            'com.vmware.vapi.std.errors.error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
        }
        set_rest = OperationRestMetadata(
            http_method='PUT',
            url_template='/appliance/access/shell',
            path_variables={},
            query_parameters={},
        )

        # 'get' operation: no input, returns a Shell.ShellConfig.
        get_input = type.StructType('operation-input', {})
        get_errors = {
            'com.vmware.vapi.std.errors.error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
        }
        get_rest = OperationRestMetadata(
            http_method='GET',
            url_template='/appliance/access/shell',
            path_variables={},
            query_parameters={},
        )

        operations = {
            'set': {
                'input_type': set_input,
                'output_type': type.VoidType(),
                'errors': set_errors,
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
            'get': {
                'input_type': get_input,
                'output_type': type.ReferenceType(__name__, 'Shell.ShellConfig'),
                'errors': get_errors,
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.appliance.access.shell',
            config=config, operations=operations,
            rest_metadata={'set': set_rest, 'get': get_rest},
            is_vapi_rest=True)
class _SshStub(ApiInterfaceStub):
    """REST stub for the com.vmware.appliance.access.ssh service."""

    def __init__(self, config):
        # 'set' operation: one boolean body parameter, PUT to the endpoint.
        set_input = type.StructType('operation-input', {
            'enabled': type.BooleanType(),
        })
        set_errors = {
            'com.vmware.vapi.std.errors.error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
        }
        set_rest = OperationRestMetadata(
            http_method='PUT',
            url_template='/appliance/access/ssh',
            path_variables={},
            query_parameters={},
        )

        # 'get' operation: no input, GET from the same endpoint.
        get_input = type.StructType('operation-input', {})
        get_errors = {
            'com.vmware.vapi.std.errors.error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
        }
        get_rest = OperationRestMetadata(
            http_method='GET',
            url_template='/appliance/access/ssh',
            path_variables={},
            query_parameters={},
        )

        operations = {
            'set': {
                'input_type': set_input,
                'output_type': type.VoidType(),
                'errors': set_errors,
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
            'get': {
                'input_type': get_input,
                'output_type': type.BooleanType(),
                'errors': get_errors,
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.appliance.access.ssh',
            config=config, operations=operations,
            rest_metadata={'set': set_rest, 'get': get_rest},
            is_vapi_rest=True)
class StubFactory(StubFactoryBase):
    """Stub factory exposing the service classes of this module."""

    # Service name -> VapiInterface subclass implementing it.
    _attrs = {
        'Consolecli': Consolecli,
        'Dcui': Dcui,
        'Shell': Shell,
        'Ssh': Ssh,
    }
| 32.075229
| 92
| 0.559121
| 1,681
| 17,481
| 5.53599
| 0.1047
| 0.067054
| 0.033527
| 0.041264
| 0.808833
| 0.792714
| 0.77896
| 0.769611
| 0.737159
| 0.709435
| 0
| 0.001458
| 0.332818
| 17,481
| 544
| 93
| 32.134191
| 0.79645
| 0.20754
| 0
| 0.630058
| 1
| 0
| 0.163585
| 0.109057
| 0
| 0
| 0
| 0
| 0
| 1
| 0.049133
| false
| 0
| 0.034682
| 0
| 0.150289
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
40b9001f66e09350ebe70a259993b8cb4ace6616
| 3,043
|
py
|
Python
|
tests/pytest/fft_tests/test_fft_input.py
|
SX-Aurora/nlcpy
|
0a53eec8778073bc48b12687b7ce37ab2bf2b7e0
|
[
"BSD-3-Clause"
] | 11
|
2020-07-31T02:21:55.000Z
|
2022-03-10T03:12:11.000Z
|
tests/pytest/fft_tests/test_fft_input.py
|
SX-Aurora/nlcpy
|
0a53eec8778073bc48b12687b7ce37ab2bf2b7e0
|
[
"BSD-3-Clause"
] | null | null | null |
tests/pytest/fft_tests/test_fft_input.py
|
SX-Aurora/nlcpy
|
0a53eec8778073bc48b12687b7ce37ab2bf2b7e0
|
[
"BSD-3-Clause"
] | null | null | null |
import functools # NOQA
import unittest
import pytest # NOQA
import numpy
import numpy as np # NOQA
import nlcpy
from nlcpy import testing
@testing.parameterize(*testing.product({
    'a': [
        [1, 2, 3, 4, 5],
        (1, 2, 3),
        range(10),
        bytearray(b'abc'),
        memoryview(b'abc'),
        numpy.asarray([1, 2]),
        nlcpy.asarray([1, 2]),
        [True, False],
        [1, 2, 3],
        [2.3, 4.5],
        [3. + 0.1j, 4. + 0.2j],
        [[1, 2, 3, 4, 5], [1, 2, 3, 4, 5]],
        [(1, 2, 3), (1, 2, 3)],
        [range(10), range(10)],
        [bytearray(b'abc'), bytearray(b'abc')],
        [memoryview(b'abc'), memoryview(b'abc')],
        [numpy.asarray([1, 2]), numpy.asarray([1, 2])],
        [nlcpy.asarray([1, 2]), nlcpy.asarray([1, 2])],
        (True, False),
        (1, 2, 3),
        (2.3, 4.5),
        (3. + 0.1j, 4. + 0.2j),
        ([1, 2, 3, 4, 5], [1, 2, 3, 4, 5]),
        ((1, 2, 3), (1, 2, 3)),
        (range(10), range(10)),
        (bytearray(b'abc'), bytearray(b'abc')),
        (memoryview(b'abc'), memoryview(b'abc')),
        (numpy.asarray([1, 2]), numpy.asarray([1, 2])),
        (nlcpy.asarray([1, 2]), nlcpy.asarray([1, 2]))
    ],
}))
@testing.with_requires('numpy>=1.10.0')
class TestFftInput(unittest.TestCase):
    """Check fft accepts a wide range of array-like inputs identically
    in numpy and nlcpy (comparison done by the allclose decorator)."""

    @testing.numpy_nlcpy_allclose(rtol=1e-4, atol=1e-7,
                                  accept_error=ValueError,
                                  contiguous_check=False)
    def test_fft(self, xp):
        """Run xp.fft.fft on the parameterized input and return the result."""
        return xp.fft.fft(self.a)
@testing.parameterize(*testing.product({
    'a': [
        [1, 2, 3, 4, 5],
        (1, 2, 3),
        range(10),
        bytearray(b'abc'),
        memoryview(b'abc'),
        numpy.asarray([1, 2]),
        nlcpy.asarray([1, 2]),
        [True, False],
        [1, 2, 3],
        [2.3, 4.5],
        # complex list omitted: numpy.fft.rfft raises TypeError for it
        [[1, 2, 3, 4, 5], [1, 2, 3, 4, 5]],
        [(1, 2, 3), (1, 2, 3)],
        [range(10), range(10)],
        [bytearray(b'abc'), bytearray(b'abc')],
        [memoryview(b'abc'), memoryview(b'abc')],
        [numpy.asarray([1, 2]), numpy.asarray([1, 2])],
        [nlcpy.asarray([1, 2]), nlcpy.asarray([1, 2])],
        (True, False),
        (1, 2, 3),
        (2.3, 4.5),
        # complex tuple omitted: numpy.fft.rfft raises TypeError for it
        ([1, 2, 3, 4, 5], [1, 2, 3, 4, 5]),
        ((1, 2, 3), (1, 2, 3)),
        (range(10), range(10)),
        (bytearray(b'abc'), bytearray(b'abc')),
        (memoryview(b'abc'), memoryview(b'abc')),
        (numpy.asarray([1, 2]), numpy.asarray([1, 2])),
        (nlcpy.asarray([1, 2]), nlcpy.asarray([1, 2]))
    ],
}))
@testing.with_requires('numpy>=1.10.0')
class TestRfftInput(unittest.TestCase):
    """Check rfft accepts a wide range of array-like inputs identically
    in numpy and nlcpy (comparison done by the allclose decorator)."""

    @testing.numpy_nlcpy_allclose(rtol=1e-4, atol=1e-7,
                                  accept_error=ValueError,
                                  contiguous_check=False)
    def _test_rfft(self, xp):
        # NOTE(review): the leading underscore keeps pytest from collecting
        # this test; confirm it is intentionally disabled.
        return xp.fft.rfft(self.a)
| 30.737374
| 79
| 0.467959
| 425
| 3,043
| 3.32
| 0.136471
| 0.062367
| 0.051028
| 0.039688
| 0.87314
| 0.853296
| 0.853296
| 0.853296
| 0.853296
| 0.853296
| 0
| 0.099569
| 0.313506
| 3,043
| 98
| 80
| 31.05102
| 0.575874
| 0.049622
| 0
| 0.827586
| 0
| 0
| 0.030513
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.022989
| false
| 0
| 0.08046
| 0
| 0.149425
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
40d75528c0577bc7d64f4d85d7514f0c5d3b052f
| 1,819
|
py
|
Python
|
play/EraPostgresProvision/scripts/Substrate_Era_PostgreSQL_DB_Action___pre_create___Task__2GetProfileIDs.py
|
halsayed/calm
|
46c93ac2b02227663f0184d149f62d142b2638cc
|
[
"MIT"
] | null | null | null |
play/EraPostgresProvision/scripts/Substrate_Era_PostgreSQL_DB_Action___pre_create___Task__2GetProfileIDs.py
|
halsayed/calm
|
46c93ac2b02227663f0184d149f62d142b2638cc
|
[
"MIT"
] | null | null | null |
play/EraPostgresProvision/scripts/Substrate_Era_PostgreSQL_DB_Action___pre_create___Task__2GetProfileIDs.py
|
halsayed/calm
|
46c93ac2b02227663f0184d149f62d142b2638cc
|
[
"MIT"
] | 1
|
2021-11-16T10:28:42.000Z
|
2021-11-16T10:28:42.000Z
|
# Set creds and headers
era_user = '@@{era_creds.username}@@'
era_pass = '@@{era_creds.secret}@@'
headers = {'Content-Type': 'application/json', 'Accept': 'application/json'}
# Get Software Profile ID
url = "https://@@{era_ip}@@:8443/era/v0.8/profiles?type=Software&name=@@{software_profile}@@"
resp = urlreq(url, verb='GET', auth='BASIC', user=era_user, passwd=era_pass, headers=headers)
if resp.ok:
print "SOFTWARE_PROF_ID={0}".format(json.loads(resp.content)['id'])
else:
print "Get Software Profile ID request failed", json.dumps(json.loads(resp.content), indent=4)
exit(1)
# Get Compute Profile ID
url = "https://@@{era_ip}@@:8443/era/v0.8/profiles?type=Compute&name=@@{compute_profile}@@"
resp = urlreq(url, verb='GET', auth='BASIC', user=era_user, passwd=era_pass, headers=headers)
if resp.ok:
print "COMPUTE_PROF_ID={0}".format(json.loads(resp.content)['id'])
else:
print "Get Compute Profile ID request failed", json.dumps(json.loads(resp.content), indent=4)
exit(1)
# Get Network Profile ID
url = "https://@@{era_ip}@@:8443/era/v0.8/profiles?type=Network&name=@@{network_profile}@@"
resp = urlreq(url, verb='GET', auth='BASIC', user=era_user, passwd=era_pass, headers=headers)
if resp.ok:
print "NETWORK_PROF_ID={0}".format(json.loads(resp.content)['id'])
else:
print "Get Network Profile ID request failed", json.dumps(json.loads(resp.content), indent=4)
exit(1)
# Get DB Parameter ID
url = "https://@@{era_ip}@@:8443/era/v0.8/profiles?type=Database_Parameter&name=@@{database_parameter}@@"
resp = urlreq(url, verb='GET', auth='BASIC', user=era_user, passwd=era_pass, headers=headers)
if resp.ok:
print "DB_PARAM_ID={0}".format(json.loads(resp.content)['id'])
else:
print "Get DB Parameter ID request failed", json.dumps(json.loads(resp.content), indent=4)
exit(1)
| 45.475
| 109
| 0.706432
| 283
| 1,819
| 4.438163
| 0.187279
| 0.057325
| 0.082803
| 0.127389
| 0.754777
| 0.754777
| 0.754777
| 0.754777
| 0.754777
| 0.754777
| 0
| 0.022018
| 0.101154
| 1,819
| 40
| 110
| 45.475
| 0.746177
| 0.061023
| 0
| 0.516129
| 0
| 0.129032
| 0.412801
| 0.027011
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.16129
| 0
| null | null | 0.258065
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
40fcdbde6486a6e3ef8d2f99e680385ed5a4507d
| 2,388
|
py
|
Python
|
custom_components/ble_monitor/test/test_altbeacon_parser.py
|
Rongronggg9/ble_monitor
|
7a8cfc6d934909249b78268a333e3414a5270214
|
[
"MIT"
] | null | null | null |
custom_components/ble_monitor/test/test_altbeacon_parser.py
|
Rongronggg9/ble_monitor
|
7a8cfc6d934909249b78268a333e3414a5270214
|
[
"MIT"
] | null | null | null |
custom_components/ble_monitor/test/test_altbeacon_parser.py
|
Rongronggg9/ble_monitor
|
7a8cfc6d934909249b78268a333e3414a5270214
|
[
"MIT"
] | null | null | null |
'''The tests for the AltBeacon ble_parser.'''
from ble_monitor.ble_parser import BleParser
from uuid import UUID
class TestAltBeacon:
    """Tests for the AltBeacon parser."""

    # Advertisement used by both tests (same beacon payload).
    DATA_STRING = '043E280201020105988527406D1C1BFFFFFFBEACD3162F5AF3EE494799DB09756062D0FC005A0005C400D4'

    def _check_sensor(self, sensor_msg):
        """Assertions shared by both tests on the decoded sensor message."""
        assert sensor_msg['type'] == 'AltBeacon'
        assert sensor_msg['packet'] == 'no packet id'
        assert sensor_msg['firmware'] == 'AltBeacon'
        assert sensor_msg['manufacturer'] == 'Other'
        assert sensor_msg['rssi'] == -44
        assert sensor_msg['mac'] == '6D:40:27:85:98:05'
        assert str(UUID(sensor_msg['uuid'])) == 'd3162f5a-f3ee-4947-99db-09756062d0fc'
        assert sensor_msg['uuid'] == 'd3162f5af3ee494799db09756062d0fc'
        assert sensor_msg['major'] == 90
        assert sensor_msg['minor'] == 5
        assert sensor_msg['measured power'] == -60

    def test_altbeacon_sensor(self):
        """Test AltBeacon parser only sensor."""
        data = bytes(bytearray.fromhex(self.DATA_STRING))
        parser = BleParser()
        sensor_msg, tracker_msg = parser.parse_data(data)
        self._check_sensor(sensor_msg)
        # No whitelist configured, so no tracker message is produced.
        assert tracker_msg is None

    def test_altbeacon_tracker(self):
        """Test AltBeacon parser only tracker."""
        data = bytes(bytearray.fromhex(self.DATA_STRING))
        parser = BleParser(
            tracker_whitelist=[bytearray.fromhex('d3162f5af3ee494799db09756062d0fc')])
        sensor_msg, tracker_msg = parser.parse_data(data)
        self._check_sensor(sensor_msg)
        assert tracker_msg['tracker_id'] == b'\xd3\x16/Z\xf3\xeeIG\x99\xdb\tu`b\xd0\xfc'
        assert sensor_msg is not None
| 48.734694
| 110
| 0.671692
| 266
| 2,388
| 5.849624
| 0.289474
| 0.144602
| 0.202442
| 0.061697
| 0.791774
| 0.757069
| 0.757069
| 0.757069
| 0.757069
| 0.757069
| 0
| 0.139241
| 0.20603
| 2,388
| 49
| 111
| 48.734694
| 0.681435
| 0.086265
| 0
| 0.736842
| 0
| 0.026316
| 0.29292
| 0.176307
| 0
| 0
| 0
| 0
| 0.657895
| 1
| 0.052632
| false
| 0
| 0.052632
| 0
| 0.131579
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
908930ed6d2343538b80ed1e3ed2b5db3ce82c20
| 5,276
|
py
|
Python
|
sdk/python/pulumi_gcp/servicedirectory/_inputs.py
|
dimpu47/pulumi-gcp
|
38355de300a5768e11c49d344a8165ba0735deed
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_gcp/servicedirectory/_inputs.py
|
dimpu47/pulumi-gcp
|
38355de300a5768e11c49d344a8165ba0735deed
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_gcp/servicedirectory/_inputs.py
|
dimpu47/pulumi-gcp
|
38355de300a5768e11c49d344a8165ba0735deed
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Dict, List, Mapping, Optional, Tuple, Union
from .. import _utilities, _tables
__all__ = [
'NamespaceIamBindingConditionArgs',
'NamespaceIamMemberConditionArgs',
'ServiceIamBindingConditionArgs',
'ServiceIamMemberConditionArgs',
]
@pulumi.input_type
class NamespaceIamBindingConditionArgs:
    """IAM condition arguments for a namespace binding: a required
    condition ``expression`` and ``title``, plus an optional
    ``description``."""

    def __init__(__self__, *,
                 expression: pulumi.Input[str],
                 title: pulumi.Input[str],
                 description: Optional[pulumi.Input[str]] = None):
        pulumi.set(__self__, "expression", expression)
        pulumi.set(__self__, "title", title)
        # Only store the description when one was actually supplied.
        if description is not None:
            pulumi.set(__self__, "description", description)

    @property
    @pulumi.getter
    def expression(self) -> pulumi.Input[str]:
        """The condition expression string."""
        return pulumi.get(self, "expression")

    @expression.setter
    def expression(self, value: pulumi.Input[str]):
        pulumi.set(self, "expression", value)

    @property
    @pulumi.getter
    def title(self) -> pulumi.Input[str]:
        """The condition title."""
        return pulumi.get(self, "title")

    @title.setter
    def title(self, value: pulumi.Input[str]):
        pulumi.set(self, "title", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """Optional free-form description of the condition."""
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)
@pulumi.input_type
class NamespaceIamMemberConditionArgs:
    """IAM condition arguments for a namespace member: a required
    condition ``expression`` and ``title``, plus an optional
    ``description``."""

    def __init__(__self__, *,
                 expression: pulumi.Input[str],
                 title: pulumi.Input[str],
                 description: Optional[pulumi.Input[str]] = None):
        pulumi.set(__self__, "expression", expression)
        pulumi.set(__self__, "title", title)
        # Only store the description when one was actually supplied.
        if description is not None:
            pulumi.set(__self__, "description", description)

    @property
    @pulumi.getter
    def expression(self) -> pulumi.Input[str]:
        """The condition expression string."""
        return pulumi.get(self, "expression")

    @expression.setter
    def expression(self, value: pulumi.Input[str]):
        pulumi.set(self, "expression", value)

    @property
    @pulumi.getter
    def title(self) -> pulumi.Input[str]:
        """The condition title."""
        return pulumi.get(self, "title")

    @title.setter
    def title(self, value: pulumi.Input[str]):
        pulumi.set(self, "title", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """Optional free-form description of the condition."""
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)
@pulumi.input_type
class ServiceIamBindingConditionArgs:
    """IAM condition arguments for a service binding: a required
    condition ``expression`` and ``title``, plus an optional
    ``description``."""

    def __init__(__self__, *,
                 expression: pulumi.Input[str],
                 title: pulumi.Input[str],
                 description: Optional[pulumi.Input[str]] = None):
        pulumi.set(__self__, "expression", expression)
        pulumi.set(__self__, "title", title)
        # Only store the description when one was actually supplied.
        if description is not None:
            pulumi.set(__self__, "description", description)

    @property
    @pulumi.getter
    def expression(self) -> pulumi.Input[str]:
        """The condition expression string."""
        return pulumi.get(self, "expression")

    @expression.setter
    def expression(self, value: pulumi.Input[str]):
        pulumi.set(self, "expression", value)

    @property
    @pulumi.getter
    def title(self) -> pulumi.Input[str]:
        """The condition title."""
        return pulumi.get(self, "title")

    @title.setter
    def title(self, value: pulumi.Input[str]):
        pulumi.set(self, "title", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """Optional free-form description of the condition."""
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)
@pulumi.input_type
class ServiceIamMemberConditionArgs:
    """IAM condition arguments for a service member: a required
    condition ``expression`` and ``title``, plus an optional
    ``description``."""

    def __init__(__self__, *,
                 expression: pulumi.Input[str],
                 title: pulumi.Input[str],
                 description: Optional[pulumi.Input[str]] = None):
        pulumi.set(__self__, "expression", expression)
        pulumi.set(__self__, "title", title)
        # Only store the description when one was actually supplied.
        if description is not None:
            pulumi.set(__self__, "description", description)

    @property
    @pulumi.getter
    def expression(self) -> pulumi.Input[str]:
        """The condition expression string."""
        return pulumi.get(self, "expression")

    @expression.setter
    def expression(self, value: pulumi.Input[str]):
        pulumi.set(self, "expression", value)

    @property
    @pulumi.getter
    def title(self) -> pulumi.Input[str]:
        """The condition title."""
        return pulumi.get(self, "title")

    @title.setter
    def title(self, value: pulumi.Input[str]):
        pulumi.set(self, "title", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """Optional free-form description of the condition."""
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)
| 30.321839
| 87
| 0.641774
| 568
| 5,276
| 5.802817
| 0.116197
| 0.133495
| 0.152913
| 0.080097
| 0.843447
| 0.843447
| 0.843447
| 0.843447
| 0.843447
| 0.843447
| 0
| 0.000247
| 0.232373
| 5,276
| 173
| 88
| 30.49711
| 0.81358
| 0.033548
| 0
| 0.888889
| 1
| 0
| 0.085215
| 0.023954
| 0
| 0
| 0
| 0
| 0
| 1
| 0.207407
| false
| 0
| 0.037037
| 0.088889
| 0.362963
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
908d8183e03844f4dbfdbbab7d53b49c3167c4df
| 296
|
py
|
Python
|
tests/test_smoke.py
|
orbitvu/django-cas-ng
|
d887bfa7699e81a2952848f9557c1495d1942b82
|
[
"MIT"
] | null | null | null |
tests/test_smoke.py
|
orbitvu/django-cas-ng
|
d887bfa7699e81a2952848f9557c1495d1942b82
|
[
"MIT"
] | null | null | null |
tests/test_smoke.py
|
orbitvu/django-cas-ng
|
d887bfa7699e81a2952848f9557c1495d1942b82
|
[
"MIT"
] | null | null | null |
from django_cas_ng.backends import *
from django_cas_ng.decorators import *
from django_cas_ng.middleware import *
from django_cas_ng.models import *
from django_cas_ng.views import *
def test_nothing_is_on_fire():
    """Smoke test: succeeds as long as the star-imports above worked."""
| 26.909091
| 69
| 0.790541
| 49
| 296
| 4.489796
| 0.530612
| 0.227273
| 0.295455
| 0.340909
| 0.381818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.155405
| 296
| 10
| 70
| 29.6
| 0.88
| 0.212838
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| true
| 0.142857
| 0.714286
| 0
| 0.857143
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
90d2b37d3997b4aea31344e5095938c6e6e1f45e
| 171,364
|
py
|
Python
|
code/pyorg/spatial/plane.py
|
anmartinezs/pyseg_system
|
5bb07c7901062452a34b73f376057cabc15a13c3
|
[
"Apache-2.0"
] | 12
|
2020-01-08T01:33:02.000Z
|
2022-03-16T00:25:34.000Z
|
code/pyorg/spatial/plane.py
|
anmartinezs/pyseg_system
|
5bb07c7901062452a34b73f376057cabc15a13c3
|
[
"Apache-2.0"
] | 8
|
2019-12-19T19:34:56.000Z
|
2022-03-10T10:11:28.000Z
|
code/pyorg/spatial/plane.py
|
anmartinezs/pyseg_system
|
5bb07c7901062452a34b73f376057cabc15a13c3
|
[
"Apache-2.0"
] | 2
|
2022-03-30T13:12:22.000Z
|
2022-03-30T18:12:10.000Z
|
"""
Classes for doing the spatial analysis of clouds of points in a 2D plane
# Author: Antonio Martinez-Sanchez (Max Planck Institute for Biochemistry)
# Date: 12.06.15
"""
__author__ = 'martinez'
import sys
import vtk
import shutil
from pyorg.globals import *
from .variables import *
# from globals import FilVisitor2
from abc import *
import matplotlib.pyplot as plt
from skimage.morphology import convex_hull_image
from matplotlib.pyplot import cm
from scipy.signal import butter, lfilter
from pyorg import pexceptions
try:
import pickle as pickle
except:
import pickle
##### PACKAGE VARIABLES
LP_ORDER = 5
LP_NORM_CUTOFF = 0.3
#### PACKAGE FUNCTION
# Convert a 3D bounding box into a 2D one
# bbox_3d: input list/tuple/array with the 3D bounding box [x_min, y_min, z_min, x_max, y_max, z_max]
# coord: coordinate to delete: 0, 1, 2 (default)
def make_plane_box(box_3d, coord=2):
    """Project a 3D bounding box onto a plane by deleting one coordinate.

    :param box_3d: sequence [x_min, y_min, z_min, x_max, y_max, z_max]
    :param coord: coordinate to delete: 0 (x), 1 (y) or 2 (z, default)
    :return: array [a_min, b_min, a_max, b_max] of the two kept axes
    """
    # np.float was removed in NumPy 1.24; the builtin is equivalent
    bbox_2d = np.zeros(shape=4, dtype=float)
    if coord == 0:
        # Drop x -> keep (y, z)
        bbox_2d[0] = box_3d[1]
        bbox_2d[1] = box_3d[2]
        bbox_2d[2] = box_3d[4]
        bbox_2d[3] = box_3d[5]
    elif coord == 1:
        # Drop y -> keep (x, z)
        bbox_2d[0] = box_3d[0]
        bbox_2d[1] = box_3d[2]
        bbox_2d[2] = box_3d[3]
        bbox_2d[3] = box_3d[5]
    else:
        # Drop z -> keep (x, y).
        # BUG FIX: this branch previously duplicated the coord==0 indices
        # ([1], [2], [4], [5]); deleting z must keep x and y, mirroring
        # make_plane()'s default branch.
        bbox_2d[0] = box_3d[0]
        bbox_2d[1] = box_3d[1]
        bbox_2d[2] = box_3d[3]
        bbox_2d[3] = box_3d[4]
    return bbox_2d
# Convert a 3D cloud of points into a 2D one
# cloud_3d: input 3D array with point coordinates [n, 3]
# coord: coordinate to delete: 0, 1, 2 (default)
def make_plane(cloud_3d, coord=2):
    """Project a 3D cloud of points onto a plane by deleting one coordinate.

    :param cloud_3d: input 3D array with point coordinates [n, 3]
    :param coord: coordinate to delete: 0 (x), 1 (y) or 2 (z, default)
    :return: 2D array [n, 2] with the two kept coordinates
    """
    # np.float was removed in NumPy 1.24; the builtin is equivalent
    cloud_2d = np.zeros(shape=(cloud_3d.shape[0], 2), dtype=float)
    if coord == 0:
        # Drop x -> keep (y, z)
        cloud_2d[:, 0] = cloud_3d[:, 1]
        cloud_2d[:, 1] = cloud_3d[:, 2]
    elif coord == 1:
        # Drop y -> keep (x, z)
        cloud_2d[:, 0] = cloud_3d[:, 0]
        cloud_2d[:, 1] = cloud_3d[:, 2]
    else:
        # Drop z -> keep (x, y)
        cloud_2d[:, 0] = cloud_3d[:, 0]
        cloud_2d[:, 1] = cloud_3d[:, 1]
    return cloud_2d
# Generates a random set of points [n, 2] in a plane
# n: number of points
# box: bounding box [x_min, y_min, x_max, y_max]abc
def gen_rand_cloud(n, box):
    """Generate n uniformly random 2D points inside a bounding box.

    :param n: number of points
    :param box: bounding box [x_min, y_min, x_max, y_max]
    :return: array [n, 2]
    """
    points = np.random.rand(n, 2)
    x_min, y_min, x_max, y_max = box[0], box[1], box[2], box[3]
    # Rescale each unit-interval coordinate into the box's range.
    points[:, 0] = (x_max - x_min) * points[:, 0] + x_min
    points[:, 1] = (y_max - y_min) * points[:, 1] + y_min
    return points
# Computes Nearest Neighbour Distance of a cloud of points in a Euclidean space
# cloud: array with point coordinates [n, 2]
def nnde(cloud):
    """Nearest Neighbour Distance of a cloud of points in Euclidean space.

    :param cloud: array with point coordinates [n, 2]
    :return: array [n] with each point's distance to its nearest neighbour
    """
    # np.float was removed in NumPy 1.24; the builtin is equivalent
    dists = np.zeros(shape=cloud.shape[0], dtype=float)
    for i in range(len(dists)):
        hold = cloud[i] - cloud
        hold = np.sum(hold*hold, axis=1)
        # Exclude the point itself from its own nearest-neighbour search
        hold[i] = np.inf
        dists[i] = math.sqrt(np.min(hold))
    return dists
# Computes the Crossed Nearest Neighbour Distance of a cloud of points to another
# in a Euclidean space
# cloud: array with point coordinates [n, 2]
# cloud_ref: reference array with point coordinates [n, 2]
def cnnde(cloud, cloud_ref):
    """Crossed Nearest Neighbour Distance of a cloud of points to another
    in Euclidean space.

    :param cloud: array with point coordinates [n, 2]
    :param cloud_ref: reference array with point coordinates [m, 2]
    :return: array [n] with each point's distance to the closest reference point
    """
    # np.float was removed in NumPy 1.24; the builtin is equivalent
    dists = np.zeros(shape=cloud.shape[0], dtype=float)
    for i in range(len(dists)):
        hold = cloud[i] - cloud_ref
        hold = np.sum(hold*hold, axis=1)
        dists[i] = math.sqrt(np.min(hold))
    return dists
# Compute Cumulative Density Function from a one-dimensional array of random samples
# var: array of stochastic samples
# n: number of samples for cdf, if n is a sequence it defines the bin edges, including rightmost edge
# Returns: cdf values and samples respectively
def compute_cdf(var, n):
    """Compute a Cumulative Density Function from 1D random samples.

    :param var: array of stochastic samples
    :param n: number of samples for the cdf; if a sequence, it defines the
        bin edges (including the rightmost edge)
    :return: cdf values and sample positions, respectively
    """
    # density=True replaces the 'normed' keyword removed in NumPy 1.24
    hist, x = np.histogram(var, bins=n+1, density=True)
    dx = x[1] - x[0]
    # Compute CDF; the last value is discarded because of its inaccuracy
    # and the first one is implicitly zero
    hold_cum = np.cumsum(hist)*dx
    return hold_cum[:-1], x[:-2]
# Computes the envelope of a stochastic function
# funcs: matrix where rows every independent simulation
# per: percentile for the envelope, default is 50 (median)
def func_envelope(funcs, per=50):
    """Envelope of a stochastic function at a given percentile.

    :param funcs: 2D array of function samples (NOTE(review): the percentile
        is taken along axis=1; the original comment says rows hold the
        independent simulations — confirm the layout against callers)
    :param per: percentile for the envelope, default 50 (the median)
    :return: per-row percentile values
    """
    envelope = np.percentile(funcs, per, axis=1)
    return envelope
# Delete repeated points (closer each other then eps) and leaves just one coordinate (median)
# cloud: array with coordinates
# eps: maximum precision
def purge_repeat_coords(cloud, eps):
    """Merge points closer than eps into a single representative point.

    :param cloud: array with coordinates [n, 2]
    :param eps: maximum precision (merge radius)
    :return: array of surviving points; each group of near-duplicates is
        replaced by its mean (NOTE(review): the original header said
        'median' but the code takes the mean)
    """
    # Initialization.
    # np.bool / np.float were removed in NumPy 1.24; builtins are equivalent
    lut_del = np.zeros(shape=cloud.shape[0], dtype=bool)
    surv = list()
    for i, point in enumerate(cloud):
        if not lut_del[i]:
            # Distances from this point to every point in the cloud
            hold = cloud[i] - cloud
            hold = np.sqrt(np.sum(hold*hold, axis=1))
            ids = np.where(hold < eps)[0]
            # Mark the whole group (including self) as consumed
            lut_del[ids] = True
            surv.append(cloud[ids, :].mean(axis=0))
    return np.asarray(surv, dtype=float)
# Delete repeated points (coordinates and ids) (closer each other then eps) and leaves just one coordinate (median)
# cloud: array with coordinates
# eps: maximum precision
def purge_repeat_coords2(cloud, cloud_ids, eps):
    """Merge points closer than eps, keeping one coordinate and one id per group.

    :param cloud: array with coordinates [n, 2]
    :param cloud_ids: ids aligned with cloud rows
    :param eps: maximum precision (merge radius)
    :return: (surviving coordinates, surviving ids); each group of
        near-duplicates is replaced by its mean coordinate and the id of
        its first member
    """
    # Initialization.
    # np.bool / np.float were removed in NumPy 1.24; builtins are equivalent
    lut_del = np.zeros(shape=cloud.shape[0], dtype=bool)
    surv = list()
    surv_ids = list()
    for i, point in enumerate(cloud):
        if not lut_del[i]:
            # Distances from this point to every point in the cloud
            hold = cloud[i] - cloud
            hold = np.sqrt(np.sum(hold*hold, axis=1))
            ids = np.where(hold < eps)[0]
            # Mark the whole group (including self) as consumed
            lut_del[ids] = True
            surv.append(cloud[ids, :].mean(axis=0))
            surv_ids.append(cloud_ids[ids[0]])
    return np.asarray(surv, dtype=float), np.asarray(surv_ids)
# Merge two boxes by intersection
def merge_boxes_2D(box_a, box_b):
    """Merge two 2D bounding boxes by intersection.

    :param box_a: box [x_min, y_min, x_max, y_max]
    :param box_b: box [x_min, y_min, x_max, y_max]
    :return: intersection box [x_min, y_min, x_max, y_max]
    """
    # np.float was removed in NumPy 1.24; the builtin is equivalent
    box = np.zeros(shape=4, dtype=float)
    # Lower bounds: take the larger; upper bounds: take the smaller
    # (equivalent to the original if/else ladder).
    box[0] = max(box_a[0], box_b[0])
    box[1] = max(box_a[1], box_b[1])
    box[2] = min(box_a[2], box_b[2])
    box[3] = min(box_a[3], box_b[3])
    return box
# Edge compensation as Goreaud specifies [J. Vegetation Sci. 10: 433-438, 1999]
# cloud: cloud of points
# box: only points within this box are considered for k-function, the rest are only
# considered for edge correction
# n: number of output samples
# max_d: maximum distance
# Returns: Ripley's H form and the radius samples
def ripley_goreaud(cloud, box, n, max_d):
# Initialization
pi_2 = 2 * np.pi
side_a = float(box[2] - box[0])
side_b = float(box[3] - box[1])
if (max_d > side_a) or (max_d > side_b):
error_msg = 'Ripley''s metric cannot be computed because max_d is greater than a cloud box dimension'
raise pexceptions.PySegInputError(expr='__ripley (SetClouds)', msg=error_msg)
area = side_a * side_b
rd = np.linspace(0, max_d, n)
N = float(cloud.shape[0])
K = np.zeros(shape=n, dtype=np.float)
if N <= 1:
return K, rd
# Cluster radius loop
for k, r in enumerate(rd):
if r == 0:
continue
# Points loop
for i in range(int(N)):
# Finding neighbours
hold = cloud[i] - cloud
dists = np.sqrt(np.sum(hold*hold, axis=1))
ids = np.where((dists > 0) & (dists < r))[0]
# Loop for neighbours
p = cloud[i, :]
weights = np.ones(shape=len(ids), dtype=np.float)
# Distance to edges
hold_dists = list()
hold_dists.append(box[2] - p[0])
hold_dists.append(p[1] - box[1])
hold_dists.append(p[0] - box[0])
hold_dists.append(box[3] - p[1])
hold_dists = np.asarray(hold_dists, dtype=np.float)
hold_dists = np.sqrt(hold_dists * hold_dists)
hold_dists = np.sort(hold_dists)
d1, d2, d3, d4 = hold_dists[0], hold_dists[1], hold_dists[2], hold_dists[3]
for j, idx in enumerate(ids):
# Compute distance to neighbour
pn = cloud[idx, :]
hold_r = p - pn
rj = math.sqrt((hold_r * hold_r).sum())
#### Edge compensation
# Switch for computing angle
if (rj > d1) and (rj <= d2) and (rj <= d3) and (rj <= d4):
alpha = 2 * math.acos(d1 / rj)
elif (rj > d1) and (rj > d2) and (rj <= d3) and (rj <= d4):
dh = d1*d1 + d2*d2
r2 = rj * rj
if r2 <= dh:
alpha = 2*math.acos(d1/rj) + 2*math.acos(d2/rj)
else:
alpha = .5*np.pi + math.acos(d1/r) + math.acos(d2/r)
elif (rj > d1) and (rj > d3) and (rj <= d2) and (rj <= d4):
alpha = 2*math.acos(d1/rj) + 2*math.acos(d3/rj)
elif (rj > d1) and (rj > d2) and (rj > d3) and (rj <= d4):
d12 = d1*d1 + d2*d2
d23 = d2*d2 + d3*d3
r2 = rj * rj
if (r2 <= d12) and (r2 <= d23):
alpha = 2*math.acos(d1/rj) + 2*math.acos(d2/rj) + 2*math.acos(d3/rj)
elif (r2 <= d12) and (r2 > d23):
alpha = .5*np.pi + 2*math.acos(d1/rj) + math.acos(d2/rj) + math.acos(d3/rj)
else:
alpha = np.pi + math.acos(d1/rj) + math.acos(d3/rj)
else:
alpha = .0
# Correcting factor
if alpha > pi_2:
weights[j] = 0.
else:
weights[j] = pi_2 / (pi_2 - alpha)
# Updating K entry
K[k] += (weights.sum())
# Compute the H form
# return np.sqrt((area*K) / (np.pi*N*(N-1))) - rd, rd
return np.sqrt((area*K) / (np.pi*N*N)) - rd, rd
###########################################################################################
# Abstract class for doing the spatial analysis
###########################################################################################
class SpA(object, metaclass=ABCMeta):
    """Abstract base class for the 2D spatial analysis of sets of point clouds.

    Accumulates point clouds (each with its bounding box) and computes the
    G-Function, the F-Function and Ripley's H metric, together with the
    envelopes obtained from random (CSR) simulations. Subclasses must provide
    insert_cloud(), pickle() and the random F/G generators.
    """
    # n_samp: number of samples for graphs
    # n_sim_f: number of simulations for generating F and G functions
    # p_f: confidence percentile for F and G functions
    # n_sim_r: number of simulations for Ripley's H
    # r_max: maximum distance for Ripley's H in nm
    # r_bord: border compensation; 0 none (default on invalid values),
    #         1 points inflation mode, 2 Goreaud
    # p_h: confidence percentile for Ripley's H
    def __init__(self, n_samp, n_sim_f, p_f, n_sim_r, r_max, r_bord, p_h):
        self.__n = n_samp
        self.__nsim_f, self.__p_f = n_sim_f, p_f
        self.__nsim_r, self.__r_max, self.__p_h = n_sim_r, r_max, p_h
        # Invalid border modes silently fall back to 0 (no compensation)
        self.__r_bord = 0
        if (r_bord == 0) or (r_bord == 1) or (r_bord == 2):
            self.__r_bord = r_bord
        self.__clouds = list()
        self.__boxes = list()
        self.__dens = list()
        self.__hsim = False
        # G-Function state: values, samples and random (CSR) envelopes
        # NOTE: np.float was removed in NumPy >= 1.24; plain float is equivalent (float64)
        self.__g = np.zeros(shape=n_samp, dtype=float)
        self.__gx = np.linspace(0., 1., self.__n)
        self.__grl = np.zeros(shape=n_samp, dtype=float)
        self.__grm = np.zeros(shape=n_samp, dtype=float)
        self.__grh = np.zeros(shape=n_samp, dtype=float)
        # F-Function state: values, samples and random (CSR) envelopes
        self.__f = np.zeros(shape=n_samp, dtype=float)
        self.__fx = np.linspace(0., 1., self.__n)
        self.__frl = np.zeros(shape=n_samp, dtype=float)
        self.__frm = np.zeros(shape=n_samp, dtype=float)
        self.__frh = np.zeros(shape=n_samp, dtype=float)
        # Ripley's H state: one curve (and samples vector) per inserted cloud
        self.__h = list()
        self.__hx = list()
        self.__hrl = np.zeros(shape=n_samp, dtype=float)
        self.__hrm = np.zeros(shape=n_samp, dtype=float)
        self.__hrh = np.zeros(shape=n_samp, dtype=float)
        self.__cards = list()
    # Get/Set functionality
    # Returns: G values, samples, and low/median/high random envelopes
    def get_function_G(self):
        return self.__g, self.__gx, self.__grl, self.__grm, self.__grh
    # Returns: F values, samples, and low/median/high random envelopes
    def get_function_F(self):
        return self.__f, self.__fx, self.__frl, self.__frm, self.__frh
    # Returns: H curves list, samples list, and low/median/high random envelopes
    def get_ripley_H(self):
        return self.__h, self.__hx, self.__hrl, self.__hrm, self.__hrh
    # External implemented functionality
    # Computes G-Function, F-Function and Ripley's H
    # h_sim: if True (default) random simulation for Ripleys'H is generated
    # r_acc: if True (default) Ripley's graphs of all clouds are accumulated
    #        into a single area-weighted curve, otherwise one curve per cloud
    def analyze(self, h_sim=True, verbose=False, r_acc=True):
        if verbose:
            sys.stdout.write('Progress: 0% ... ')
        # G-Function
        self.__g, self.__gx = self.__function_G(self.__n)
        if verbose:
            sys.stdout.write('17% ... ')
        self.__grl, self.__grm, self.__grh, _ = self.__rand_function_G(self.__n, self.__nsim_f,
                                                                       self.__p_f)
        if verbose:
            sys.stdout.write('33% ... ')
        # F-Function
        self.__f, self.__fx = self.__function_F(self.__n, self.__nsim_f)
        if verbose:
            sys.stdout.write('50% ... ')
        self.__frl, self.__frm, self.__frh, _ = self.__rand_function_F(self.__n, self.__nsim_f,
                                                                       self.__p_f)
        if verbose:
            sys.stdout.write('67% ... ')
        # Ripley's H
        if r_acc:
            self.__ripleys_H(self.__n, self.__r_max, self.__r_bord)
        else:
            self.__ripleys_H_test(self.__n, self.__r_max, self.__r_bord)
        if verbose:
            sys.stdout.write('83% ... ')
        if h_sim:
            self.__hsim = True
            self.__hrl, self.__hrm, self.__hrh, _ = self.__rand_ripleys_H(self.__n, self.__nsim_r,
                                                                          self.__r_max, self.__r_bord,
                                                                          self.__p_h)
        if verbose:
            print('100%')
    # Plot into figures the current analysis state
    # block: if True (default False) waits for closing windows for finishing the execution
    def plot(self, block=False):
        # Initialization
        fig_count = 0
        width = 0.35
        ind = np.arange(len(self.__dens)) - (width*.5)
        # NOTE(review): ion() enables interactive (non-blocking) mode, which looks
        # contradictory with block=True; blocking is finally decided by show(block=block)
        if block:
            plt.ion()
        # Plot clouds (optionally colored by cardinality)
        for i, cloud in enumerate(self.__clouds):
            fig_count += 1
            plt.figure(fig_count)
            plt.title('Cloud of points ' + str(fig_count))
            plt.xlabel('X (nm)')
            plt.ylabel('Y (nm)')
            plt.axis('scaled')
            plt.xlim(self.__boxes[i][0], self.__boxes[i][2])
            plt.ylim(self.__boxes[i][1], self.__boxes[i][3])
            if len(self.__cards) <= 0:
                plt.scatter(cloud[:, 0], cloud[:, 1])
            else:
                cax = plt.scatter(cloud[:, 0], cloud[:, 1], c=self.__cards[i], cmap=cm.jet)
                plt.colorbar(cax, orientation='horizontal')
        # Plot densities
        fig_count += 1
        plt.figure(fig_count)
        plt.title('Points density')
        plt.xlabel('Sample')
        plt.ylabel('Density (points/nm^2)')
        plt.bar(ind, np.asarray(self.__dens, dtype=float), width)
        # Plot G-Function (blue) with random median (red) and envelopes (dashed)
        fig_count += 1
        plt.figure(fig_count)
        plt.title('G-Function')
        plt.xlabel('Distance (nm)')
        plt.ylabel('G')
        plt.ylim(0, 1)
        plt.plot(self.__gx, self.__g, 'b')
        plt.plot(self.__gx, self.__grm, 'r')
        plt.plot(self.__gx, self.__grl, 'k--')
        plt.plot(self.__gx, self.__grh, 'k--')
        # Plot F-Function (same color convention as G)
        fig_count += 1
        plt.figure(fig_count)
        plt.title('F-Function')
        plt.xlabel('Distance (nm)')
        plt.ylabel('F')
        plt.ylim(0, 1)
        plt.plot(self.__fx, self.__f, 'b')
        plt.plot(self.__fx, self.__frm, 'r')
        plt.plot(self.__fx, self.__frl, 'k--')
        plt.plot(self.__fx, self.__frh, 'k--')
        # Plot Ripley's H, one labelled curve per stored cloud
        fig_count += 1
        plt.figure(fig_count)
        plt.title('Ripley H')
        plt.xlabel('Radius (nm)')
        plt.ylabel('H')
        color = cm.rainbow(np.linspace(0, 1, len(self.__h)))
        idx = np.arange(len(self.__h)) + 1
        lines = list()
        for (h, hx, c, ids) in zip(self.__h, self.__hx, color, idx):
            line, = plt.plot(hx, h, c=c, label=str(ids))
            lines.append(line)
        if len(lines) > 0:
            plt.legend(handles=lines)
        if self.__hsim:
            # NOTE(review): __hx is a list of sample arrays; this presumably assumes
            # all clouds share the same samples vector -- confirm for multi-cloud use
            plt.plot(self.__hx, self.__hrm, 'r')
            plt.plot(self.__hx, self.__hrl, 'k--')
            plt.plot(self.__hx, self.__hrh, 'k--')
        # Show
        plt.show(block=block)
    #### External abstract functionality
    # Base implementation stores the cloud, its box and its point density;
    # subclasses extend it with their own bookkeeping
    @abstractmethod
    def insert_cloud(self, cloud, box, clsts=None, mask=None):
        self.__clouds.append(cloud)
        self.__boxes.append(box)
        area = (box[2] - box[0]) * (box[3] - box[1])
        if area > 0:
            self.__dens.append(cloud.shape[0] / area)
        else:
            self.__dens.append(0.)
    @abstractmethod
    def pickle(self, fname):
        raise NotImplementedError('pickle() (SpA). '
                                  'Abstract method, it requires an implementation.')
    #### Internal implemented functionality
    # Computes Ripley's function in H form accumulated (area-weighted) over all clouds
    # n: number of samples
    # max_d: max distance for being considered
    # border: if 0 (default) border compensation is not active, 1 points inflation mode, 2 Goreaud
    # Results are appended to the internal H lists
    def __ripleys_H(self, n, max_d, border=0):
        # Initialization
        rips = np.zeros(shape=(n, len(self.__clouds)), dtype=float)
        rd = np.zeros(shape=n, dtype=float)
        # Ripleys K factors computation
        area = 0
        weights = np.zeros(shape=len(self.__clouds), dtype=float)
        for i, cloud in enumerate(self.__clouds):
            box = self.__boxes[i]
            area_h = float((box[2] - box[0]) * (box[3] - box[1]))
            if max_d > math.sqrt(area_h*.5):
                print(WRN_RED + 'Warning (ripleys_H): cloud area small compared with maximum distance')
            weights[i] = area_h
            area += area_h
            if border == 1:
                # Inflate point cloud
                cloud_inf = self.__inflate_2D(cloud)
                rips[:, i], rd = self.__ripley(cloud_inf, box, n, max_d)
            elif border == 2:
                rips[:, i], rd = self.__ripley_goreaud(cloud, box, n, max_d)
            else:
                rips[:, i], rd = self.__ripley(cloud, box, n, max_d)
        # Cloud weighting according to box area
        if area <= 0:
            cte = 0
        else:
            cte = 1 / area
        weights *= cte
        # Insert to object variable
        self.__h.append((weights*rips).sum(axis=1))
        self.__hx.append(rd)
    # Computes Ripley's function in H form, one independent curve per cloud,
    # and resets/updates the correspondent lists
    # n: number of samples
    # max_d: max distance for being considered
    # border: if 0 (default) border compensation is not active, 1 points inflation mode, 2 Goreaud
    def __ripleys_H_test(self, n, max_d, border=0):
        # Initialization
        self.__h = list()
        self.__hx = list()
        # Ripleys H computation
        for i, cloud in enumerate(self.__clouds):
            box = self.__boxes[i]
            if border == 1:
                # Inflate point cloud
                cloud_inf = self.__inflate_2D(cloud)
                hold_h, hold_x = self.__ripley(cloud_inf, box, n, max_d)
            elif border == 2:
                hold_h, hold_x = self.__ripley_goreaud(cloud, box, n, max_d)
            else:
                hold_h, hold_x = self.__ripley(cloud, box, n, max_d)
            self.__h.append(hold_h)
            self.__hx.append(hold_x)
    # Computes Ripley's function in H form for CSR
    # n: number of samples
    # m: number of simulations
    # max_d: max distance for being considered
    # border: if 0 (default) border compensation is not active, 1 points inflation mode, 2 Goreaud
    #         (BUG FIX: default was True, contradicting this documented contract)
    # p: percentile for computing envelopes (default 5%)
    # Returns: Ripley's K 0.05, 0.5 and 0.95 envelopes, and samples respectively
    def __rand_ripleys_H(self, n, m, max_d, border=0, p=5):
        # Generate random points: one column per (simulation, cloud) pair
        rips = np.zeros(shape=(n, m*len(self.__clouds)), dtype=float)
        cont = 0
        rd = np.zeros(shape=n, dtype=float)
        for i in range(m):
            for j, cloud in enumerate(self.__clouds):
                box = self.__boxes[j]
                cloud_1 = gen_rand_cloud(cloud.shape[0], box)
                area_h = float((box[2] - box[0]) * (box[3] - box[1]))
                if max_d > math.sqrt(area_h*.5):
                    print(WRN_RED + 'Warning (rand_ripleys_H): cloud area small compared with maximum distance')
                # BUG FIX: every branch must store into column 'cont' (the running
                # simulation counter); the border branches previously wrote to
                # column 'i', overwriting results and leaving most columns zeroed
                if border == 1:
                    # Inflate point cloud
                    cloud_inf = self.__inflate_2D(cloud_1)
                    rips[:, cont], rd = self.__ripley(cloud_inf, box, n, max_d)
                elif border == 2:
                    rips[:, cont], rd = self.__ripley_goreaud(cloud_1, box, n, max_d)
                else:
                    rips[:, cont], rd = self.__ripley(cloud_1, box, n, max_d)
                cont += 1
        # Compute envelopes
        env_005 = func_envelope(rips, per=p)
        env_05 = func_envelope(rips, per=50)
        env_095 = func_envelope(rips, per=100-p)
        return env_005, env_05, env_095, rd
    # Returns True only when point p is at distance >= max_d from all four box edges
    def __is_not_closer_to_border(self, p, box, max_d):
        # Border distances (abs value via sqrt(x*x))
        hold = p[0] - box[0]
        d_1 = math.sqrt(hold * hold)
        hold = p[0] - box[2]
        d_2 = math.sqrt(hold * hold)
        hold = p[1] - box[1]
        d_3 = math.sqrt(hold * hold)
        hold = p[1] - box[3]
        d_4 = math.sqrt(hold * hold)
        if (d_1 < max_d) or (d_2 < max_d) or (d_3 < max_d) or (d_4 < max_d):
            return False
        else:
            return True
    # Inflates a 2D spatial cloud of points by adding 8 flipped versions of the original data
    # in its neighbourhood
    def __inflate_2D(self, cloud):
        # Flipping
        flip_x, flip_y = flip_cloud(cloud, 0), flip_cloud(cloud, 1)
        flip_xy = flip_cloud(flip_x, 1)
        # Computing bounding box
        min_x, min_y, max_x, max_y = cloud[:, 0].min(), cloud[:, 1].min(), \
                                     cloud[:, 0].max(), cloud[:, 1].max()
        # Adding neighbours (3x3 tiling with the original cloud in the center)
        c_00 = np.zeros(shape=cloud.shape, dtype=cloud.dtype)
        c_01 = np.zeros(shape=cloud.shape, dtype=cloud.dtype)
        c_02 = np.zeros(shape=cloud.shape, dtype=cloud.dtype)
        c_10 = np.zeros(shape=cloud.shape, dtype=cloud.dtype)
        c_12 = np.zeros(shape=cloud.shape, dtype=cloud.dtype)
        c_20 = np.zeros(shape=cloud.shape, dtype=cloud.dtype)
        c_21 = np.zeros(shape=cloud.shape, dtype=cloud.dtype)
        c_22 = np.zeros(shape=cloud.shape, dtype=cloud.dtype)
        c_00[:, 0], c_00[:, 1] = flip_xy[:, 0] - max_x, flip_xy[:, 1] - max_y
        c_01[:, 0], c_01[:, 1] = flip_x[:, 0] - max_x, flip_x[:, 1]
        c_02[:, 0], c_02[:, 1] = flip_xy[:, 0] - max_x, flip_xy[:, 1] + max_y
        c_10[:, 0], c_10[:, 1] = flip_y[:, 0], flip_y[:, 1] - max_y
        c_12[:, 0], c_12[:, 1] = flip_y[:, 0], flip_y[:, 1] + max_y
        c_20[:, 0], c_20[:, 1] = flip_xy[:, 0] + max_x, flip_xy[:, 1] - max_y
        c_21[:, 0], c_21[:, 1] = flip_x[:, 0] + max_x, flip_x[:, 1]
        c_22[:, 0], c_22[:, 1] = flip_xy[:, 0] + max_x, flip_xy[:, 1] + max_y
        # Concatenate result
        return np.concatenate([c_00, c_01, c_02, c_10, cloud, c_12, c_20, c_21, c_22], axis=0)
    # Plain Ripley's metric (no edge correction)
    # cloud: cloud of points
    # box: only points within this box are considered for k-function, the rest are only
    #      considered for edge correction
    # n: number of output samples
    # max_d: maximum distance
    # Returns: Ripley's H form and the radius samples
    def __ripley(self, cloud, box, n, max_d):
        # Non-edge correction points detection
        hold = (cloud[:, 0] >= box[0]) & (cloud[:, 1] >= box[1]) & \
               (cloud[:, 0] <= box[2]) & (cloud[:, 1] <= box[3])
        core_ids = np.where(hold)[0]
        # Initialization
        side_a = float(box[2] - box[0])
        side_b = float(box[3] - box[1])
        if (max_d > side_a) or (max_d > side_b):
            # BUG FIX: 'Ripley''s' was adjacent-string concatenation ("Ripleys")
            error_msg = 'Ripley\'s metric cannot be computed because max_d is greater than a cloud box dimension'
            raise pexceptions.PySegInputError(expr='__ripley (SetClouds)', msg=error_msg)
        area = side_a * side_b
        rd = np.linspace(0, max_d, n)
        N = float(len(core_ids))
        K = np.zeros(shape=n, dtype=float)
        if N <= 1:
            return K, rd
        # Cluster radius loop
        for k, r in enumerate(rd):
            # Points loop
            for i in range(int(N)):
                # Finding neighbours strictly within (0, r)
                hold = cloud[i] - cloud
                dists = np.sqrt(np.sum(hold*hold, axis=1))
                k_hold = ((dists > 0) & (dists < r)).sum()
                # Updating K entry
                K[k] += k_hold
            # Compute the H form
        # Alternative unbiased estimator: np.sqrt((area*K) / (np.pi*N*(N-1))) - rd
        return np.sqrt((area*K) / (np.pi*N*N)) - rd, rd
    # Edge compensation as Goreaud specifies [J. Vegetation Sci. 10: 433-438, 1999]
    # cloud: cloud of points
    # box: only points within this box are considered for k-function, the rest are only
    #      considered for edge correction
    # n: number of output samples
    # max_d: maximum distance
    # Returns: Ripley's H form and the radius samples
    def __ripley_goreaud(self, cloud, box, n, max_d):
        # Initialization
        pi_2 = 2 * np.pi
        side_a = float(box[2] - box[0])
        side_b = float(box[3] - box[1])
        if (max_d > side_a) or (max_d > side_b):
            # BUG FIX: 'Ripley''s' was adjacent-string concatenation ("Ripleys")
            error_msg = 'Ripley\'s metric cannot be computed because max_d is greater than a cloud box dimension'
            raise pexceptions.PySegInputError(expr='__ripley (SetClouds)', msg=error_msg)
        area = side_a * side_b
        rd = np.linspace(0, max_d, n)
        N = float(cloud.shape[0])
        K = np.zeros(shape=n, dtype=float)
        if N <= 1:
            return K, rd
        # Cluster radius loop
        for k, r in enumerate(rd):
            if r == 0:
                continue
            # Points loop
            for i in range(int(N)):
                # Finding neighbours
                hold = cloud[i] - cloud
                dists = np.sqrt(np.sum(hold*hold, axis=1))
                ids = np.where((dists > 0) & (dists < r))[0]
                # Loop for neighbours
                p = cloud[i, :]
                weights = np.ones(shape=len(ids), dtype=float)
                # Distances to the four box edges, sorted increasingly (d1 <= d2 <= d3 <= d4)
                hold_dists = list()
                hold_dists.append(box[2] - p[0])
                hold_dists.append(p[1] - box[1])
                hold_dists.append(p[0] - box[0])
                hold_dists.append(box[3] - p[1])
                hold_dists = np.asarray(hold_dists, dtype=float)
                hold_dists = np.sqrt(hold_dists * hold_dists)
                hold_dists = np.sort(hold_dists)
                d1, d2, d3, d4 = hold_dists[0], hold_dists[1], hold_dists[2], hold_dists[3]
                for j, idx in enumerate(ids):
                    # Compute distance to neighbour
                    pn = cloud[idx, :]
                    hold_r = p - pn
                    rj = math.sqrt((hold_r * hold_r).sum())
                    #### Edge compensation
                    # Switch for computing the angle (alpha) of the circle of radius rj
                    # centered on p which falls outside the box
                    if (rj > d1) and (rj <= d2) and (rj <= d3) and (rj <= d4):
                        alpha = 2 * math.acos(d1 / rj)
                    elif (rj > d1) and (rj > d2) and (rj <= d3) and (rj <= d4):
                        dh = d1*d1 + d2*d2
                        r2 = rj * rj
                        if r2 <= dh:
                            alpha = 2*math.acos(d1/rj) + 2*math.acos(d2/rj)
                        else:
                            # BUG FIX: used the loop radius 'r' instead of the neighbour
                            # distance 'rj' (inconsistent with all sibling branches and
                            # able to raise a math domain error when d > r)
                            alpha = .5*np.pi + math.acos(d1/rj) + math.acos(d2/rj)
                    elif (rj > d1) and (rj > d3) and (rj <= d2) and (rj <= d4):
                        alpha = 2*math.acos(d1/rj) + 2*math.acos(d3/rj)
                    elif (rj > d1) and (rj > d2) and (rj > d3) and (rj <= d4):
                        d12 = d1*d1 + d2*d2
                        d23 = d2*d2 + d3*d3
                        r2 = rj * rj
                        if (r2 <= d12) and (r2 <= d23):
                            alpha = 2*math.acos(d1/rj) + 2*math.acos(d2/rj) + 2*math.acos(d3/rj)
                        elif (r2 <= d12) and (r2 > d23):
                            alpha = .5*np.pi + 2*math.acos(d1/rj) + math.acos(d2/rj) + math.acos(d3/rj)
                        else:
                            alpha = np.pi + math.acos(d1/rj) + math.acos(d3/rj)
                    else:
                        alpha = .0
                    # Correcting factor: weight neighbours by the visible arc fraction
                    if alpha > pi_2:
                        weights[j] = 0.
                    else:
                        weights[j] = pi_2 / (pi_2 - alpha)
                # Updating K entry
                K[k] += (weights.sum())
        # Compute the H form
        # Alternative unbiased estimator: np.sqrt((area*K) / (np.pi*N*(N-1))) - rd
        return np.sqrt((area*K) / (np.pi*N*N)) - rd, rd
    # Computes G function for the accumulated set of inserted clouds
    # n: number of samples for cdf
    # Returns: G-Function values and samples respectively
    def __function_G(self, n):
        # Compute nearest-neighbour distances of all clouds pooled together
        dists = list()
        for cloud in self.__clouds:
            dists += nnde(cloud).tolist()
        dists = np.asarray(dists, dtype=float)
        # CDF
        return compute_cdf(dists, n)
    #### Internal abstract functionality
    # n: number of samples for cdf
    # m: number of simulations for cdf
    @abstractmethod
    def __function_F(self, n, m):
        raise NotImplementedError('__function_F() (SpA). '
                                  'Abstract method, it requires an implementation.')
    # n: number of samples for cdf
    # m: number of simulations for cdf
    # p: percentile for computing envelopes (default 5%)
    # Returns: samples, F-Function 0.05, 0.5 and 0.95 envelopes, and samples respectively
    @abstractmethod
    def __rand_function_F(self, n, m, p=5):
        raise NotImplementedError('__rand_function_F() (SpA). '
                                  'Abstract method, it requires an implementation.')
    # n: number of samples for cdf
    # m: number of simulations for cdf
    # p: percentile for computing envelopes (default 5%)
    # Returns: samples, G-Function 0.05, 0.5 and 0.95 envelopes, and samples respectively
    @abstractmethod
    def __rand_function_G(self, n, m, p=5):
        raise NotImplementedError('__rand_function_G() (SpA). '
                                  'Abstract method, it requires an implementation.')
###########################################################################################
# Class for doing a spatial analysis from several independent set of points
###########################################################################################
class SetClouds(SpA):
    """Spatial analysis of several independent sets (clouds) of points."""
    # n_samp: number of samples for graphs
    # n_sim_f: number of simulations for generating F and G functions
    # p_f: confidence percentile for F and G functions
    # n_sim_r: number of simulations for Ripley's H
    # r_max: maximum distance for Ripley's H in nm
    # r_bord: if 0 (default) border compensation is not active, 1 points inflation mode and
    #         2 Goreaud
    # p_h: confidence percentile for Ripleys's H
    def __init__(self, n_samp, n_sim_f, p_f, n_sim_r, r_max, r_bord, p_h):
        super(SetClouds, self).__init__(n_samp, n_sim_f, p_f, n_sim_r, r_max, r_bord, p_h)
    #### Set/Get methods area
    #### External functionality area
    # cloud: array with point coordinates in a plane [n, 2]
    # box: bounding box [x_min, y_min, x_max, y_max]
    # cards: array with point cardinalities
    def insert_cloud(self, cloud, box, cards):
        super(SetClouds, self).insert_cloud(cloud, box)
        self._SpA__cards.append(cards)
    # Pickling the object state
    # fname: full path for the pickle file
    def pickle(self, fname):
        # BUG FIX: pickle streams are binary, so the file must be opened in
        # 'wb' mode (text mode 'w' raises TypeError on Python 3)
        with open(fname, 'wb') as pkl_f:
            pickle.dump(self, pkl_f)
    #### Internal functionality area
    # Computes G function for CSR
    # n: number of samples for cdf
    # m: number of simulations for cdf
    # p: percentile for computing envelopes (default 5%)
    # Returns: G-Function 0.05, 0.5 and 0.95 envelopes, and samples respectively
    def _SpA__rand_function_G(self, n, m, p=5):
        # Generate random points
        dists = list()
        cdfs = np.zeros(shape=(n, m*len(self._SpA__clouds)), dtype=float)
        cont = 0
        for i in range(m):
            for j, cloud in enumerate(self._SpA__clouds):
                hold_dists = nnde(gen_rand_cloud(cloud.shape[0], self._SpA__boxes[j]))
                cdfs[:, cont], _ = compute_cdf(hold_dists, n)
                dists += hold_dists.tolist()
                cont += 1
        dists = np.asarray(dists, dtype=float)
        # Compute results (pooled cdf gives the common samples vector)
        gf, sp = compute_cdf(dists, n)
        env_005 = func_envelope(cdfs, per=p)
        env_05 = func_envelope(cdfs, per=50)
        env_095 = func_envelope(cdfs, per=100-p)
        return env_005, env_05, env_095, sp
    # Computes F function for CSR
    # n: number of samples for cdf
    # m: number of simulations for cdf
    # p: percentile for computing envelopes (default 5%)
    # Returns: F-Function 0.05, 0.5 and 0.95 envelopes, and samples respectively
    def _SpA__rand_function_F(self, n, m, p=5):
        # Generate random points (cross nearest-neighbour distances between two
        # independent random clouds per sample)
        dists = list()
        cdfs = np.zeros(shape=(n, m*len(self._SpA__clouds)), dtype=float)
        cont = 0
        for i in range(m):
            for j, cloud in enumerate(self._SpA__clouds):
                cloud_1 = gen_rand_cloud(cloud.shape[0], self._SpA__boxes[j])
                cloud_2 = gen_rand_cloud(cloud.shape[0], self._SpA__boxes[j])
                hold_dists = cnnde(cloud_1, cloud_2)
                cdfs[:, cont], _ = compute_cdf(hold_dists, n)
                dists += hold_dists.tolist()
                cont += 1
        dists = np.asarray(dists, dtype=float)
        # Compute results
        gf, sp = compute_cdf(dists, n)
        env_005 = func_envelope(cdfs, per=p)
        env_05 = func_envelope(cdfs, per=50)
        env_095 = func_envelope(cdfs, per=100-p)
        return env_005, env_05, env_095, sp
    # Computes F function for the accumulated set of inserted clouds
    # n: number of samples for cdf
    # m: number of random simulations
    # Returns: F-Function values and samples respectively
    def _SpA__function_F(self, n, m):
        # Generate random points
        dists = list()
        for i in range(m):
            for j, cloud in enumerate(self._SpA__clouds):
                dists += cnnde(cloud, gen_rand_cloud(cloud.shape[0], self._SpA__boxes[j])).tolist()
        dists = np.asarray(dists, dtype=float)
        # CDF
        return compute_cdf(dists, n)
###########################################################################################
# Class for doing a spatial analysis from cluster of points
###########################################################################################
class SetClusters(SpA):
    """Spatial analysis of clusters of points.

    Clouds are the clusters' centers of gravity; random (CSR) simulations
    re-place whole clusters at random valid locations of a mask.
    """
    # n_samp: number of samples for graphs
    # n_sim_f: number of simulations for generating F and G functions
    # p_f: confidence percentile for F and G functions
    # n_sim_r: number of simulations for Ripley's H
    # r_max: maximum distance for Ripley's H in nm
    # r_bord: if 0 (default) border compensation is not active, 1 points inflation mode and
    #         2 Goreaud
    # p_h: confidence percentile for Ripleys's H
    # r_t: number of tries for random clusters generation
    def __init__(self, n_samp, n_sim_f, p_f, n_sim_r, r_max, r_bord, p_h, r_t=50):
        super(SetClusters, self).__init__(n_samp, n_sim_f, p_f, n_sim_r, r_max, r_bord, p_h)
        self.__clsts_l = list()
        self.__masks_l = list()
        self.__r_t = r_t
    #### External functionality area
    # cloud_cg: array with point coordinates of clusters centers of gravity in a plane [n, 2]
    # box: bounding box [x_min, y_min, x_max, y_max]
    # clsts: ordered list with clusters, each cluster is an array of points
    # mask: mask where False-values mark invalid regions
    def insert_cloud(self, cloud_cg, box, clsts, mask):
        super(SetClusters, self).insert_cloud(cloud_cg, box)
        self.__clsts_l.append(clsts)
        self.__masks_l.append(mask)
    # Pickling the object state
    # fname: full path for the pickle file
    def pickle(self, fname):
        # BUG FIX: pickle streams are binary, so the file must be opened in
        # 'wb' mode (text mode 'w' raises TypeError on Python 3)
        with open(fname, 'wb') as pkl_f:
            pickle.dump(self, pkl_f)
    #### Internal functionality area
    # Generates a random distribution of the internal clusters
    # clsts: list of clusters
    # box: bounding box
    # mask: binary mask where False valued regions are invalid
    # Returns: an array with new centroids (the number of placement tries per
    #          cluster is self.__r_t; the least overlapped try is kept)
    def __get_rand_clsts(self, clsts, box, mask):
        # Initialization
        n_cgs = np.zeros(shape=(len(clsts), 2), dtype=float)
        # Loop for clusters
        mask_h = np.copy(mask)
        for i, c_cloud in enumerate(clsts):
            # Translate to base coordinates and computes minimum distance to center of gravity
            cg = c_cloud.mean(axis=0)
            f_cloud = c_cloud - cg
            # Compute valid search areas
            # (scipy.ndimage.morphology namespace is deprecated; the function
            # lives at scipy.ndimage directly)
            dst_t = sp.ndimage.distance_transform_edt(mask_h)
            mask_dst = np.zeros(shape=mask_h.shape, dtype=mask_h.dtype)
            mask_dst[dst_t > 0] = True
            if (dst_t > 0).sum() <= 0:
                error_msg = 'Mask fully overlapped.'
                raise pexceptions.PySegTransitionError(expr='__get_rand_clsts (SetClusters)',
                                                       msg=error_msg)
            # Keep the best try (lower overlapping)
            min_ov = MAX_FLOAT
            h_cg = None
            h_chull = np.zeros(shape=mask_h.shape, dtype=mask_h.dtype)
            for c_try in range(self.__r_t):
                # Random selection for the new centroid from valid areas
                # BUG FIX: a single shared index keeps (row, col) coupled so the
                # selected centroid lies on a valid mask pixel; previously row and
                # column were drawn independently, which can land on invalid pixels
                m_ids = np.where(mask_dst)
                r_idx = np.random.randint(0, len(m_ids[0]))
                cg_x, cg_y = m_ids[0][r_idx], m_ids[1][r_idx]
                # Rotate randomly against base center [0, 0]
                rho = np.random.rand() * (2*np.pi)
                sinr, cosr = math.sin(rho), math.cos(rho)
                r_cloud = np.zeros(shape=f_cloud.shape, dtype=f_cloud.dtype)
                r_cloud[:, 0] = f_cloud[:, 0]*cosr - f_cloud[:, 1]*sinr
                r_cloud[:, 1] = f_cloud[:, 0]*sinr + f_cloud[:, 1]*cosr
                # Translation to randomly already selected center
                n_cg = np.asarray((cg_x, cg_y), dtype=float)
                t_cloud = r_cloud + n_cg
                chull, _ = self.__compute_chull_no_bound(t_cloud, box)
                # Update minimum overlap (pixels of the hull outside the free mask)
                ov = chull.sum() - (chull * mask_h).sum()
                if ov < min_ov:
                    min_ov = ov
                    h_cg = n_cg
                    h_chull = chull
                    if ov == 0:
                        break
            else:
                # Loop exhausted without a perfect fit; fall back to the last try
                # if no candidate improved the initial minimum
                if h_cg is None:
                    h_cg = n_cg
            # Update mask: the placed cluster's hull becomes invalid for the rest
            mask_h[h_chull] = False
            # Get new center transposed (row/col -> x/y)
            n_cgs[i, 0] = h_cg[1]
            n_cgs[i, 1] = h_cg[0]
        return n_cgs
    # Returns convex hull; points out of bounds are discarded and no exception is
    # raised, instead in a second variable a True is returned
    def __compute_chull_no_bound(self, c_cloud, box):
        # Create holding image
        off_x = math.floor(box[1])
        off_y = math.floor(box[0])
        m, n = math.ceil(box[3]) - off_x + 1, math.ceil(box[2]) - off_y + 1
        img = np.zeros(shape=(m, n), dtype=bool)
        # Filling holding image
        hold = np.asarray(np.round(c_cloud), dtype=int)
        hold[:, 0] -= off_y
        hold[:, 1] -= off_x
        excep = False
        p_count = 0
        for p in hold:
            try:
                img[p[0], p[1]] = True
            except IndexError:
                # Out-of-bounds point: flag it and keep going
                excep = True
                continue
            p_count += 1
        # Computing the convex hull
        if p_count > 0:
            chull = np.asarray(convex_hull_image(img), dtype=bool)
        else:
            chull = img
        return chull, excep
    # Computes G function for CSR
    # n: number of samples for cdf
    # m: number of simulations for cdf
    # p: percentile for computing envelopes (default 5%)
    # Returns: G-Function 0.05, 0.5 and 0.95 envelopes, and samples respectively
    def _SpA__rand_function_G(self, n, m, p=5):
        # Generate random points
        dists = list()
        cdfs = np.zeros(shape=(n, m*len(self.__clsts_l)), dtype=float)
        cont = 0
        for i in range(m):
            for j, clsts in enumerate(self.__clsts_l):
                hold_dists = nnde(self.__get_rand_clsts(clsts, self._SpA__boxes[j],
                                                        self.__masks_l[j]))
                cdfs[:, cont], _ = compute_cdf(hold_dists, n)
                dists += hold_dists.tolist()
                cont += 1
        dists = np.asarray(dists, dtype=float)
        # Compute results (pooled cdf gives the common samples vector)
        gf, sp = compute_cdf(dists, n)
        env_005 = func_envelope(cdfs, per=p)
        env_05 = func_envelope(cdfs, per=50)
        env_095 = func_envelope(cdfs, per=100-p)
        return env_005, env_05, env_095, sp
    # Computes F function for CSR
    # n: number of samples for cdf
    # m: number of simulations for cdf
    # p: percentile for computing envelopes (default 5%)
    # Returns: F-Function 0.05, 0.5 and 0.95 envelopes, and samples respectively
    def _SpA__rand_function_F(self, n, m, p=5):
        # Generate random points
        dists = list()
        cdfs = np.zeros(shape=(n, m*len(self.__clsts_l)), dtype=float)
        cont = 0
        for i in range(m):
            for j, clsts in enumerate(self.__clsts_l):
                cloud_1 = self.__get_rand_clsts(clsts, self._SpA__boxes[j], self.__masks_l[j])
                cloud_2 = self.__get_rand_clsts(clsts, self._SpA__boxes[j], self.__masks_l[j])
                hold_dists = cnnde(cloud_1, cloud_2)
                cdfs[:, cont], _ = compute_cdf(hold_dists, n)
                dists += hold_dists.tolist()
                cont += 1
        dists = np.asarray(dists, dtype=float)
        # Compute results
        gf, sp = compute_cdf(dists, n)
        env_005 = func_envelope(cdfs, per=p)
        env_05 = func_envelope(cdfs, per=50)
        env_095 = func_envelope(cdfs, per=100-p)
        return env_005, env_05, env_095, sp
    # Computes F function for the accumulated set of inserted clouds
    # n: number of samples for cdf
    # m: number of random simulations
    # Returns: F-Function values and samples respectively
    def _SpA__function_F(self, n, m):
        # Generate random points
        dists = list()
        for i in range(m):
            for j, cloud in enumerate(self._SpA__clouds):
                dists += cnnde(cloud, self.__get_rand_clsts(self.__clsts_l[j],
                                                            self._SpA__boxes[j],
                                                            self.__masks_l[j])).tolist()
        dists = np.asarray(dists, dtype=float)
        # CDF
        return compute_cdf(dists, n)
###########################################################################################
# Abstract class for doing the spatial analysis of a set of slices
###########################################################################################
class SlA(object, metaclass=ABCMeta):
# For Abstract Base Classes in python
def __init__(self, box, n_samp, n_sim_f, r_max, r_bord, p_f=None):
self.__box = box
self.__n = n_samp
self.__nsim_f = n_sim_f
self.__r_max = r_max
self.__r_bord = 0
self.__p_f = p_f
if (r_bord == 0) or (r_bord == 1) or (r_bord == 2):
self.__r_bord = r_bord
self.__clouds = list()
self.__dens = list()
self.__g = list()
self.__gx = list()
self.__grm = np.zeros(shape=n_samp, dtype=np.float)
self.__grm1 = np.zeros(shape=n_samp, dtype=np.float)
self.__grm2 = np.zeros(shape=n_samp, dtype=np.float)
self.__grmx = np.zeros(shape=n_samp, dtype=np.float)
self.__f = list()
self.__fx = list()
self.__frm = np.zeros(shape=n_samp, dtype=np.float)
self.__frm1 = np.zeros(shape=n_samp, dtype=np.float)
self.__frm2 = np.zeros(shape=n_samp, dtype=np.float)
self.__frmx = np.zeros(shape=n_samp, dtype=np.float)
self.__h = list()
self.__hx = list()
self.__l = list()
self.__lx = list()
self.__hp = list()
self.__hpx = list()
self.__lp = list()
self.__lpx = list()
self.__cards = list()
self.__srs = list()
# Low pass filter for differentials
b, a = butter(LP_ORDER, LP_NORM_CUTOFF, btype='low', analog=False)
self.__lpf = (b, a)
# Get/Set functionality
def get_box(self):
return self.__box
# Return a cloud coordinates by passing a name (sr string), no if this name is not valid
def get_cloud_by_name(self, name):
try:
idx = self.__srs.index(name)
return self.__clouds[idx]
except ValueError:
return None
def get_clouds_list(self):
return self.__clouds
def get_densities(self):
return np.asarray(self.__dens, dtype=np.float)
def get_function_G(self):
return self.__g, self.__gx, self.__grm
def get_function_F(self):
return self.__f, self.__fx, self.__frm
def get_ripley_H(self):
return self.__h, self.__hx
def get_ripley_Hp(self):
return self.__hp, self.__hpx
def get_ripley_L(self):
return self.__l, self.__lx
def get_ripley_Lp(self):
return self.__lp, self.__lpx
def get_slice_ranges(self):
return self.__srs
# External implemented functionality
# Computes G-Function, F-Function and Ripley's H
def analyze(self, verbose=False):
if verbose:
sys.stdout.write('Progress: 0% ... ')
# G-Function
self.__function_G(self.__n)
if verbose:
sys.stdout.write('20% ... ')
if self.__nsim_f > 0:
self.__grmx, self.__grm, self.__grm1, self.__grm2 = self.__rand_function_G(self.__n, self.__nsim_f,
self.__p_f)
if verbose:
sys.stdout.write('40% ... ')
# F-Function
self.__function_F(self.__n, self.__nsim_f)
if verbose:
sys.stdout.write('60% ... ')
if self.__nsim_f > 0:
self.__frmx, self.__frm, self.__frm1, self.__frm2 = self.__rand_function_F(self.__n, self.__nsim_f,
self.__p_f)
if verbose:
sys.stdout.write('80% ... ')
# Ripley's metrics
self.__ripleys_H_test(self.__n, self.__r_max, self.__r_bord)
self.__ripleys_L()
self.__ripleys_Hp()
self.__ripleys_Lp()
if verbose:
print('100%')
# Plot into figures the current analysis state
# block: if True (default False) waits for closing windows for finishing the execution
# cloud_over: if True (default) all clouds are plot in the same figure
# fourier: it True (default) the fourier analysis is also plotted
# l_metric: it True (default False) the Ripley's L metric is computed
# r_stat: it True (default False) Ripley's H statistics are measured
def plot(self, block=False, cloud_over=True, fourier=True, l_metric=False, r_stat=False):
# Initialization
fig_count = 0
if block:
plt.ion()
labels = self.__srs
ind = np.arange(1, len(labels)+1)
color = cm.rainbow(np.linspace(0, 1, len(self.__srs)))
# Plot clouds
if cloud_over:
fig_count += 1
plt.figure(fig_count)
plt.title('Clouds of points')
plt.xlabel('X (nm)')
plt.ylabel('Y (nm)')
plt.axis('scaled')
plt.xlim(self.__box[0], self.__box[2])
plt.ylim(self.__box[1], self.__box[3])
for i, cloud in enumerate(self.__clouds):
if cloud.shape[0] > 0:
if (len(self.__cards) <= 0) or (self.__cards[i] is None):
plt.scatter(cloud[:, 0], cloud[:, 1], c=color[i])
else:
cax = plt.scatter(cloud[:, 0], cloud[:, 1], c=self.__cards[i], cmap=cm.jet)
plt.colorbar(cax, orientation='horizontal')
else:
for i, cloud in enumerate(self.__clouds):
fig_count += 1
plt.figure(fig_count)
plt.title('Clouds of points ' + labels[i])
plt.xlabel('X (nm)')
plt.ylabel('Y (nm)')
plt.axis('scaled')
plt.xlim(self.__box[0], self.__box[2])
plt.ylim(self.__box[1], self.__box[3])
if cloud.shape[0] > 0:
if (len(self.__cards) <= 0) or (self.__cards[i] is None):
plt.scatter(cloud[:, 0], cloud[:, 1])
else:
cax = plt.scatter(cloud[:, 0], cloud[:, 1], c=self.__cards[i], cmap=cm.jet)
plt.colorbar(cax, orientation='horizontal')
# Plot densities
fig_count += 1
plt.figure(fig_count)
plt.title('Points density')
plt.xlabel('Sample')
plt.ylabel('Density (points/nm^2)')
plt.xlim(ind[0]-1, ind[-1]+1)
plt.stem(ind, np.asarray(self.__dens, dtype=np.float))
# Plot G-Function
fig_count += 1
plt.figure(fig_count)
plt.title('G-Function')
plt.xlabel('Distance (nm)')
plt.ylabel('G')
plt.ylim(0, 1)
if self.__nsim_f > 0:
plt.plot(self.__grmx, self.__grm, 'k')
if self.__p_f is not None:
plt.plot(self.__grmx, self.__grm1, 'k--')
plt.plot(self.__grmx, self.__grm2, 'k--')
lines = list()
for (g, gx, c, lbl) in zip(self.__g, self.__gx, color, labels):
line, = plt.plot(gx, g, c=c, label=lbl)
lines.append(line)
if len(lines) > 0:
plt.legend(handles=lines)
# Plot F-Function
fig_count += 1
plt.figure(fig_count)
plt.title('F-Function')
plt.xlabel('Distance (nm)')
plt.ylabel('F')
plt.ylim(0, 1)
if self.__nsim_f > 0:
plt.plot(self.__frmx, self.__frm, 'k')
if self.__p_f is not None:
plt.plot(self.__frmx, self.__frm1, 'k--')
plt.plot(self.__frmx, self.__frm2, 'k--')
lines = list()
for (f, fx, c, lbl) in zip(self.__f, self.__fx, color, labels):
line, = plt.plot(fx, f, c=c, label=lbl)
lines.append(line)
if len(lines) > 0:
plt.legend(handles=lines)
# Plot Ripley's H
fig_count += 1
plt.figure(fig_count)
plt.title('Ripley H')
plt.xlabel('Radius (nm)')
plt.ylabel('H')
for (h, hx, c, lbl) in zip(self.__h, self.__hx, color, labels):
plt.plot(hx, h, c=c, label=lbl)
if len(self.__h) > 0:
plt.plot(self.__hx[0], np.zeros(shape=len(self.__h[0])), 'k--')
# Plot Ripley's H Fourier analysis
if fourier:
# Compute FFT
hfs = list()
freqs = list()
for (h, hx) in zip(self.__h, self.__hx):
freqs.append(np.fft.fftshift(np.fft.fftfreq(len(h), hx[1] - hx[0])))
hfs.append(np.fft.fftshift(np.fft.fft(h)))
# Figures
fig_count += 1
plt.figure(str(fig_count) + '- Ripley H Fourier analysis')
plt.subplot(2, 2, 1)
plt.xlabel('Freq')
plt.ylabel('Real')
for (hf, f, c, lbl) in zip(hfs, freqs, color, labels):
plt.plot(f, np.real(hf), c=c, label=lbl)
plt.subplot(2, 2, 2)
plt.xlabel('Freq')
plt.ylabel('Imag')
for (hf, f, c, lbl) in zip(hfs, freqs, color, labels):
plt.plot(f, np.imag(hf), c=c, label=lbl)
plt.subplot(2, 2, 3)
plt.xlabel('Freq')
plt.ylabel('Abs')
for (hf, f, c, lbl) in zip(hfs, freqs, color, labels):
plt.plot(f, np.abs(hf), c=c, label=lbl)
plt.subplot(2, 2, 4)
plt.xlabel('Freq')
plt.ylabel('Angle')
for (hf, f, c, lbl) in zip(hfs, freqs, color, labels):
plt.plot(f, np.angle(hf), c=c, label=lbl)
# Plot Ripley's L
if l_metric:
fig_count += 1
plt.figure(fig_count)
plt.title('Ripley L')
plt.xlabel('Radius (nm)')
plt.ylabel('L')
for (l, lx, c, lbl) in zip(self.__l, self.__lx, color, labels):
plt.plot(lx, l, c=c, label=lbl)
if len(self.__l) > 0:
plt.plot(self.__lx[0], self.__lx[0], 'k--')
# Plot Ripley's L Fourier analysis
if fourier:
# Compute FFT
lfs = list()
freqs = list()
for (l, lx) in zip(self.__l, self.__lx):
freqs.append(np.fft.fftshift(np.fft.fftfreq(len(l), lx[1] - lx[0])))
lfs.append(np.fft.fftshift(np.fft.fft(l)))
# Figures
fig_count += 1
plt.figure(str(fig_count) + '- Ripley L Fourier analysis')
plt.subplot(2, 2, 1)
plt.xlabel('Freq')
plt.ylabel('Real')
for (lf, f, c, lbl) in zip(lfs, freqs, color, labels):
plt.plot(f, np.real(lf), c=c, label=lbl)
plt.subplot(2, 2, 2)
plt.xlabel('Freq')
plt.ylabel('Imag')
for (lf, f, c, lbl) in zip(lfs, freqs, color, labels):
plt.plot(f, np.imag(lf), c=c, label=lbl)
plt.subplot(2, 2, 3)
plt.xlabel('Freq')
plt.ylabel('Abs')
for (lf, f, c, lbl) in zip(lfs, freqs, color, labels):
plt.plot(f, np.abs(lf), c=c, label=lbl)
plt.subplot(2, 2, 4)
plt.xlabel('Freq')
plt.ylabel('Angle')
for (lf, f, c, lbl) in zip(lfs, freqs, color, labels):
plt.plot(f, np.angle(lf), c=c, label=lbl)
# Plot Ripley's H'
fig_count += 1
plt.figure(fig_count)
plt.title('Ripley H first derivative')
plt.xlabel('Radius (nm)')
plt.ylabel('H\'')
for (hp, hpx, c, lbl) in zip(self.__hp, self.__hpx, color, labels):
plt.plot(hpx, hp, c=c, label=lbl)
if len(self.__hp) > 0:
plt.plot(self.__hpx[0], np.zeros(shape=len(self.__hp[0])), 'k--')
# Plot Ripley's H' Fourier analysis
if fourier:
# Compute FFT
hpfs = list()
freqs = list()
for (hp, hpx) in zip(self.__hp, self.__hpx):
freqs.append(np.fft.fftshift(np.fft.fftfreq(len(hp), hpx[1] - hpx[0])))
hpfs.append(np.fft.fftshift(np.fft.fft(hp)))
# Figures
fig_count += 1
plt.figure(str(fig_count) + '- Ripley H\' Fourier analysis')
plt.subplot(2, 2, 1)
plt.xlabel('Freq')
plt.ylabel('Real')
for (hpf, f, c, lbl) in zip(hpfs, freqs, color, labels):
plt.plot(f, np.real(hpf), c=c, label=lbl)
plt.subplot(2, 2, 2)
plt.xlabel('Freq')
plt.ylabel('Imag')
for (hpf, f, c, lbl) in zip(hpfs, freqs, color, labels):
plt.plot(f, np.imag(hpf), c=c, label=lbl)
plt.subplot(2, 2, 3)
plt.xlabel('Freq')
plt.ylabel('Abs')
for (hpf, f, c, lbl) in zip(hpfs, freqs, color, labels):
plt.plot(f, np.abs(hpf), c=c, label=lbl)
plt.subplot(2, 2, 4)
plt.xlabel('Freq')
plt.ylabel('Angle')
for (hpf, f, c, lbl) in zip(hpfs, freqs, color, labels):
plt.plot(f, np.angle(hpf), c=c, label=lbl)
# Plot Ripley's L\'
if l_metric:
fig_count += 1
plt.figure(fig_count)
plt.title('Ripley L first derivative')
plt.xlabel('Radius (nm)')
plt.ylabel('L\'')
for (lp, lpx, c, lbl) in zip(self.__lp, self.__lpx, color, labels):
plt.plot(lpx, lp, c=c, label=lbl)
if len(self.__lp) > 0:
plt.plot(self.__lpx[0], np.ones(shape=len(self.__lp[0])), 'k--')
# Plot Ripley's L' Fourier analysis
if fourier:
# Compute FFT
lpfs = list()
freqs = list()
for (lp, lpx) in zip(self.__lp, self.__lpx):
freqs.append(np.fft.fftshift(np.fft.fftfreq(len(lp), lpx[1] - lpx[0])))
lpfs.append(np.fft.fftshift(np.fft.fft(lp)))
# Figures
fig_count += 1
plt.figure(str(fig_count) + '- Ripley L\' Fourier analysis')
plt.subplot(2, 2, 1)
plt.xlabel('Freq')
plt.ylabel('Real')
for (lpf, f, c, lbl) in zip(lpfs, freqs, color, labels):
plt.plot(f, np.real(lpf), c=c, label=lbl)
plt.subplot(2, 2, 2)
plt.xlabel('Freq')
plt.ylabel('Imag')
for (lpf, f, c, lbl) in zip(lpfs, freqs, color, labels):
plt.plot(f, np.imag(lpf), c=c, label=lbl)
plt.subplot(2, 2, 3)
plt.xlabel('Freq')
plt.ylabel('Abs')
for (lpf, f, c, lbl) in zip(lpfs, freqs, color, labels):
plt.plot(f, np.abs(lpf), c=c, label=lbl)
plt.subplot(2, 2, 4)
plt.xlabel('Freq')
plt.ylabel('Angle')
for (lpf, f, c, lbl) in zip(lpfs, freqs, color, labels):
plt.plot(f, np.angle(lpf), c=c, label=lbl)
# Plot Riley's H statistics
if r_stat:
# Compute stats
maxs = list()
medians = list()
stds = list()
for h in self.__h:
maxs.append(h.max())
medians.append(np.median(h))
stds.append(h.std())
nsam = np.arange(len(maxs))
# Plotting
fig_count += 1
plt.figure(fig_count)
plt.title('Ripley\'s H maximum')
plt.xlabel('Sample')
plt.ylabel('H maximum')
plt.xlim(nsam[0]-1, nsam[-1]+1)
plt.stem(nsam, np.asarray(maxs, dtype=np.float))
fig_count += 1
plt.figure(fig_count)
plt.title('Ripley\'s H medians')
plt.xlabel('Sample')
plt.ylabel('H medians')
plt.xlim(nsam[0]-1, nsam[-1]+1)
plt.stem(nsam, np.asarray(medians, dtype=np.float))
fig_count += 1
plt.figure(fig_count)
plt.title('Ripley\'s H standard deviations')
plt.xlabel('Sample')
plt.ylabel('H deviations')
plt.xlim(nsam[0]-1, nsam[-1]+1)
plt.stem(nsam, np.asarray(stds, dtype=np.float))
# Show
plt.show(block=block)
# Plot into figures the current analysis state
# path: path to the folder where figures will be stored
# cloud_over: if True (default) all clouds are plot in the same figure
# fourier: it True (default) the fourier analysis is also plotted
def store_figs(self, path, cloud_over=True, fourier=True):
# Initialization
fig_count = 0
labels = self.__srs
ind = np.arange(1, len(labels)+1)
color = cm.rainbow(np.linspace(0, 1, len(self.__srs)))
# Plot clouds
if cloud_over:
fig_count += 1
plt.figure(fig_count)
plt.title('Clouds of points')
plt.xlabel('X (nm)')
plt.ylabel('Y (nm)')
plt.axis('scaled')
plt.xlim(self.__box[0], self.__box[2])
plt.ylim(self.__box[1], self.__box[3])
for i, cloud in enumerate(self.__clouds):
if cloud.shape[0] > 0:
if (len(self.__cards) <= 0) or (self.__cards[i] is None):
plt.scatter(cloud[:, 0], cloud[:, 1], c=color[i])
else:
cax = plt.scatter(cloud[:, 0], cloud[:, 1], c=self.__cards[i], cmap=cm.jet)
plt.colorbar(cax, orientation='horizontal')
plt.savefig(path + '/clouds.png')
plt.close()
else:
for i, cloud in enumerate(self.__clouds):
fig_count += 1
plt.figure(fig_count)
plt.title('Clouds of points ' + labels[i])
plt.xlabel('X (nm)')
plt.ylabel('Y (nm)')
plt.axis('scaled')
plt.xlim(self.__box[0], self.__box[2])
plt.ylim(self.__box[1], self.__box[3])
if cloud.shape[0] > 0:
if (len(self.__cards) <= 0) or (self.__cards[i] is None):
plt.scatter(cloud[:, 0], cloud[:, 1])
else:
cax = plt.scatter(cloud[:, 0], cloud[:, 1], c=self.__cards[i], cmap=cm.jet)
plt.colorbar(cax, orientation='horizontal')
plt.savefig(path + '/cloud_' + labels[i] + '.png')
plt.close()
# Plot densities
fig_count += 1
plt.figure(fig_count)
plt.title('Points density')
plt.xlabel('Sample')
plt.ylabel('Density (points/nm^2)')
plt.xlim(ind[0]-1, ind[-1]+1)
plt.stem(ind, np.asarray(self.__dens, dtype=np.float))
plt.savefig(path + '/dens.png')
plt.close()
# Plot G-Function
fig_count += 1
plt.figure(fig_count)
plt.title('G-Function')
plt.xlabel('Distance (nm)')
plt.ylabel('G')
plt.ylim(0, 1)
if self.__nsim_f > 0:
plt.plot(self.__grmx, self.__grm, 'k')
if self.__p_f is not None:
plt.plot(self.__grmx, self.__grm1, 'k--')
plt.plot(self.__grmx, self.__grm2, 'k--')
lines = list()
for (g, gx, c, lbl) in zip(self.__g, self.__gx, color, labels):
line, = plt.plot(gx, g, c=c, label=lbl)
lines.append(line)
if len(lines) > 0:
plt.legend(handles=lines)
plt.savefig(path + '/g.png')
plt.close()
# Plot F-Function
fig_count += 1
plt.figure(fig_count)
plt.title('F-Function')
plt.xlabel('Distance (nm)')
plt.ylabel('F')
plt.ylim(0, 1)
if self.__nsim_f > 0:
plt.plot(self.__frmx, self.__frm, 'k')
if self.__p_f is not None:
plt.plot(self.__frmx, self.__frm1, 'k--')
plt.plot(self.__frmx, self.__frm2, 'k--')
lines = list()
for (f, fx, c, lbl) in zip(self.__f, self.__fx, color, labels):
line, = plt.plot(fx, f, c=c, label=lbl)
lines.append(line)
if len(lines) > 0:
plt.legend(handles=lines)
plt.savefig(path + '/f.png')
plt.close()
# Plot Ripley's H
fig_count += 1
plt.figure(fig_count)
plt.title('Ripley H')
plt.xlabel('Radius (nm)')
plt.ylabel('H')
for (h, hx, c, lbl) in zip(self.__h, self.__hx, color, labels):
plt.plot(hx, h, c=c, label=lbl)
if len(self.__h) > 0:
plt.plot(self.__hx[0], np.zeros(shape=len(self.__h[0])), 'k--')
plt.savefig(path + '/h.png')
plt.close()
# Plot Ripley's H Fourier analysis
if fourier:
# Compute FFT
hfs = list()
freqs = list()
for (h, hx) in zip(self.__h, self.__hx):
freqs.append(np.fft.fftshift(np.fft.fftfreq(len(h), hx[1] - hx[0])))
hfs.append(np.fft.fftshift(np.fft.fft(h)))
# Figures
fig_count += 1
plt.figure(str(fig_count) + '- Ripley H Fourier analysis')
plt.subplot(2, 2, 1)
plt.xlabel('Freq')
plt.ylabel('Real')
for (hf, f, c, lbl) in zip(hfs, freqs, color, labels):
plt.plot(f, np.real(hf), c=c, label=lbl)
plt.subplot(2, 2, 2)
plt.xlabel('Freq')
plt.ylabel('Imag')
for (hf, f, c, lbl) in zip(hfs, freqs, color, labels):
plt.plot(f, np.imag(hf), c=c, label=lbl)
plt.subplot(2, 2, 3)
plt.xlabel('Freq')
plt.ylabel('Abs')
for (hf, f, c, lbl) in zip(hfs, freqs, color, labels):
plt.plot(f, np.abs(hf), c=c, label=lbl)
plt.subplot(2, 2, 4)
plt.xlabel('Freq')
plt.ylabel('Angle')
for (hf, f, c, lbl) in zip(hfs, freqs, color, labels):
plt.plot(f, np.angle(hf), c=c, label=lbl)
plt.savefig(path + '/h_f.png')
plt.close()
# Plot Ripley's L
fig_count += 1
plt.figure(fig_count)
plt.title('Ripley L')
plt.xlabel('Radius (nm)')
plt.ylabel('L')
for (l, lx, c, lbl) in zip(self.__l, self.__lx, color, labels):
plt.plot(lx, l, c=c, label=lbl)
if len(self.__l) > 0:
plt.plot(self.__lx[0], self.__lx[0], 'k--')
plt.savefig(path + '/l.png')
plt.close()
# Plot Ripley's L Fourier analysis
if fourier:
# Compute FFT
lfs = list()
freqs = list()
for (l, lx) in zip(self.__l, self.__lx):
freqs.append(np.fft.fftshift(np.fft.fftfreq(len(l), lx[1] - lx[0])))
lfs.append(np.fft.fftshift(np.fft.fft(l)))
# Figures
fig_count += 1
plt.figure(str(fig_count) + '- Ripley L Fourier analysis')
plt.subplot(2, 2, 1)
plt.xlabel('Freq')
plt.ylabel('Real')
for (lf, f, c, lbl) in zip(lfs, freqs, color, labels):
plt.plot(f, np.real(lf), c=c, label=lbl)
plt.subplot(2, 2, 2)
plt.xlabel('Freq')
plt.ylabel('Imag')
for (lf, f, c, lbl) in zip(lfs, freqs, color, labels):
plt.plot(f, np.imag(lf), c=c, label=lbl)
plt.subplot(2, 2, 3)
plt.xlabel('Freq')
plt.ylabel('Abs')
for (lf, f, c, lbl) in zip(lfs, freqs, color, labels):
plt.plot(f, np.abs(lf), c=c, label=lbl)
plt.subplot(2, 2, 4)
plt.xlabel('Freq')
plt.ylabel('Angle')
for (lf, f, c, lbl) in zip(lfs, freqs, color, labels):
plt.plot(f, np.angle(lf), c=c, label=lbl)
plt.savefig(path + '/l_f.png')
plt.close()
# Plot Ripley's H'
fig_count += 1
plt.figure(fig_count)
plt.title('Ripley H first derivative')
plt.xlabel('Radius (nm)')
plt.ylabel('H\'')
for (hp, hpx, c, lbl) in zip(self.__hp, self.__hpx, color, labels):
plt.plot(hpx, hp, c=c, label=lbl)
if len(self.__hp) > 0:
plt.plot(self.__hpx[0], np.zeros(shape=len(self.__hp[0])), 'k--')
plt.savefig(path + '/hp.png')
plt.close()
# Plot Ripley's H' Fourier analysis
if fourier:
# Compute FFT
hpfs = list()
freqs = list()
for (hp, hpx) in zip(self.__hp, self.__hpx):
freqs.append(np.fft.fftshift(np.fft.fftfreq(len(hp), hpx[1] - hpx[0])))
hpfs.append(np.fft.fftshift(np.fft.fft(hp)))
# Figures
fig_count += 1
plt.figure(str(fig_count) + '- Ripley H\' Fourier analysis')
plt.subplot(2, 2, 1)
plt.xlabel('Freq')
plt.ylabel('Real')
for (hpf, f, c, lbl) in zip(hpfs, freqs, color, labels):
plt.plot(f, np.real(hpf), c=c, label=lbl)
plt.subplot(2, 2, 2)
plt.xlabel('Freq')
plt.ylabel('Imag')
for (hpf, f, c, lbl) in zip(hpfs, freqs, color, labels):
plt.plot(f, np.imag(hpf), c=c, label=lbl)
plt.subplot(2, 2, 3)
plt.xlabel('Freq')
plt.ylabel('Abs')
for (hpf, f, c, lbl) in zip(hpfs, freqs, color, labels):
plt.plot(f, np.abs(hpf), c=c, label=lbl)
plt.subplot(2, 2, 4)
plt.xlabel('Freq')
plt.ylabel('Angle')
for (hpf, f, c, lbl) in zip(hpfs, freqs, color, labels):
plt.plot(f, np.angle(hpf), c=c, label=lbl)
plt.savefig(path + '/hp_f.png')
plt.close()
# Plot Ripley's L
fig_count += 1
plt.figure(fig_count)
plt.title('Ripley L first derivative')
plt.xlabel('Radius (nm)')
plt.ylabel('L\'')
for (lp, lpx, c, lbl) in zip(self.__lp, self.__lpx, color, labels):
plt.plot(lpx, lp, c=c, label=lbl)
if len(self.__lp) > 0:
plt.plot(self.__lpx[0], np.ones(shape=len(self.__lp[0])), 'k--')
plt.savefig(path + '/lp.png')
plt.close()
# Plot Ripley's L' Fourier analysis
if fourier:
# Compute FFT
lpfs = list()
freqs = list()
for (lp, lpx) in zip(self.__lp, self.__lpx):
freqs.append(np.fft.fftshift(np.fft.fftfreq(len(lp), lpx[1] - lpx[0])))
lpfs.append(np.fft.fftshift(np.fft.fft(lp)))
# Figures
fig_count += 1
plt.figure(str(fig_count) + '- Ripley L\' Fourier analysis')
plt.subplot(2, 2, 1)
plt.xlabel('Freq')
plt.ylabel('Real')
for (lpf, f, c, lbl) in zip(lpfs, freqs, color, labels):
plt.plot(f, np.real(lpf), c=c, label=lbl)
plt.subplot(2, 2, 2)
plt.xlabel('Freq')
plt.ylabel('Imag')
for (lpf, f, c, lbl) in zip(lpfs, freqs, color, labels):
plt.plot(f, np.imag(lpf), c=c, label=lbl)
plt.subplot(2, 2, 3)
plt.xlabel('Freq')
plt.ylabel('Abs')
for (lpf, f, c, lbl) in zip(lpfs, freqs, color, labels):
plt.plot(f, np.abs(lpf), c=c, label=lbl)
plt.subplot(2, 2, 4)
plt.xlabel('Freq')
plt.ylabel('Angle')
for (lpf, f, c, lbl) in zip(lpfs, freqs, color, labels):
plt.plot(f, np.angle(lpf), c=c, label=lbl)
plt.savefig(path + '/lp_f.png')
plt.close()
#### External abstract functionality
@abstractmethod
def insert_cloud(self, cloud, sr, clsts=None, mask=None):
self.__clouds.append(cloud)
self.__srs.append(sr)
area = (self.__box[2] - self.__box[0]) * (self.__box[3] - self.__box[1])
if area > 0:
self.__dens.append(cloud.shape[0] / area)
else:
self.__dens.append(0.)
    # Pickling the object state (must be implemented by subclasses)
    # fname: full path for the pickle file
    @abstractmethod
    def pickle(self, fname):
        raise NotImplementedError('pickle() (SpA). '
                                  'Abstract method, it requires an implementation.')
#### Internal implemented functionality
# Computes Ripley's function in H and updates the correspondent lists
# n: number of samples
# max_d: max distance for being considered
# border: if 0 (default) border compensation is not active, 1 points inflation mode, 2 Goreaud
# Returns: Ripley's K values and samples respectively
def __ripleys_H_test(self, n, max_d, border=0):
# Initialization
self.__h = list()
self.__hx = list()
box = self.__box
# Ripleys H computation
for i, cloud in enumerate(self.__clouds):
if border == 1:
# Inflate point cloud
cloud_inf = self.__inflate_2D(cloud)
hold_h, hold_x = self.__ripley(cloud_inf, box, n, max_d)
elif border == 2:
hold_h, hold_x = self.__ripley_goreaud(cloud, box, n, max_d)
else:
hold_h, hold_x = self.__ripley(cloud, box, n, max_d)
# Low pass filtering
self.__h.append(lfilter(self.__lpf[0], self.__lpf[1], hold_h))
# self.__h.append(hold_h)
self.__hx.append(hold_x)
# Computes Ripley's L form from H
def __ripleys_L(self):
for (hx, h) in zip(self.__hx, self.__h):
self.__lx.append(hx)
# Low pass filtering
self.__l.append(lfilter(self.__lpf[0], self.__lpf[1], h+hx))
# Computes Ripley's H first derivative
def __ripleys_Hp(self):
for (hx, h) in zip(self.__hx, self.__h):
self.__hpx.append(hx)
if hx.shape[0] > 1:
# Equally spaced differential
self.__hp.append(np.gradient(h, hx[1] - hx[0]))
else:
self.__hp.append(np.asarray(.0))
# Computes Ripley's L first derivative
def __ripleys_Lp(self):
for (lx, l) in zip(self.__lx, self.__l):
self.__lpx.append(lx)
if lx.shape[0] > 1:
# Equally spaced differential
self.__lp.append(np.gradient(l, lx[1] - lx[0]))
else:
self.__lp.append(np.asarray(.0))
def __is_not_closer_to_border(self, p, box, max_d):
# Border distances
hold = p[0] - box[0]
d_1 = math.sqrt(hold * hold)
hold = p[0] - box[2]
d_2 = math.sqrt(hold * hold)
hold = p[1] - box[1]
d_3 = math.sqrt(hold * hold)
hold = p[1] - box[3]
d_4 = math.sqrt(hold * hold)
if (d_1 < max_d) or (d_2 < max_d) or (d_3 < max_d) or (d_4 < max_d):
return False
else:
return True
    # Inflates a 2D spatial cloud of points by adding 8 flipped versions of the original data
    # in its neighbourhood (used for border compensation in Ripley's estimator)
    # cloud: input [n, 2] array of point coordinates
    # Returns: a [9n, 2] array with the original cloud surrounded by its mirrored copies
    def __inflate_2D(self, cloud):
        # Flipping
        flip_x, flip_y = flip_cloud(cloud, 0), flip_cloud(cloud, 1)
        flip_xy = flip_cloud(flip_x, 1)
        # Computing bounding box
        # NOTE(review): min_x and min_y are computed but never used below; the tile offsets
        # rely only on max_x/max_y, which is correct only for clouds anchored near the
        # origin -- confirm whether the offsets should be (max_x - min_x)/(max_y - min_y)
        min_x, min_y, max_x, max_y = cloud[:, 0].min(), cloud[:, 1].min(), \
                                     cloud[:, 0].max(), cloud[:, 1].max()
        # Adding neighbours: one mirrored copy for each surrounding tile of a 3x3 grid
        c_00 = np.zeros(shape=cloud.shape, dtype=cloud.dtype)
        c_01 = np.zeros(shape=cloud.shape, dtype=cloud.dtype)
        c_02 = np.zeros(shape=cloud.shape, dtype=cloud.dtype)
        c_10 = np.zeros(shape=cloud.shape, dtype=cloud.dtype)
        c_12 = np.zeros(shape=cloud.shape, dtype=cloud.dtype)
        c_20 = np.zeros(shape=cloud.shape, dtype=cloud.dtype)
        c_21 = np.zeros(shape=cloud.shape, dtype=cloud.dtype)
        c_22 = np.zeros(shape=cloud.shape, dtype=cloud.dtype)
        c_00[:, 0], c_00[:, 1] = flip_xy[:, 0] - max_x, flip_xy[:, 1] - max_y
        c_01[:, 0], c_01[:, 1] = flip_x[:, 0] - max_x, flip_x[:, 1]
        c_02[:, 0], c_02[:, 1] = flip_xy[:, 0] - max_x, flip_xy[:, 1] + max_y
        c_10[:, 0], c_10[:, 1] = flip_y[:, 0], flip_y[:, 1] - max_y
        c_12[:, 0], c_12[:, 1] = flip_y[:, 0], flip_y[:, 1] + max_y
        c_20[:, 0], c_20[:, 1] = flip_xy[:, 0] + max_x, flip_xy[:, 1] - max_y
        c_21[:, 0], c_21[:, 1] = flip_x[:, 0] + max_x, flip_x[:, 1]
        c_22[:, 0], c_22[:, 1] = flip_xy[:, 0] + max_x, flip_xy[:, 1] + max_y
        # Concatenate result (the original cloud sits in the middle tile)
        return np.concatenate([c_00, c_01, c_02, c_10, cloud, c_12, c_20, c_21, c_22], axis=0)
# cloud: cloud of points
# box: only points within this box are considered for k-function, the rest are only
# considered for edge correction
# n: number of output samples
# max_d: maximum distance
# Returns: Ripley's H form and the radius samples
def __ripley(self, cloud, box, n, max_d):
# Non-edge correction points detection
hold = (cloud[:, 0] >= box[0]) & (cloud[:, 1] >= box[1]) & \
(cloud[:, 0] <= box[2]) & (cloud[:, 1] <= box[3])
core_ids = np.where(hold)[0]
# Initialization
side_a = float(box[2] - box[0])
side_b = float(box[3] - box[1])
if (max_d > side_a) or (max_d > side_b):
error_msg = 'Ripley''s metric cannot be computed because max_d is greater than a cloud box dimension'
raise pexceptions.PySegInputError(expr='__ripley (SetClouds)', msg=error_msg)
area = side_a * side_b
rd = np.linspace(0, max_d, n)
N = float(len(core_ids))
K = np.zeros(shape=n, dtype=np.float)
if N <= 1:
return K, rd
# Cluster radius loop
for k, r in enumerate(rd):
# Points loop
for i in range(int(N)):
# Finding neighbours
hold = cloud[i] - cloud
dists = np.sqrt(np.sum(hold*hold, axis=1))
k_hold = ((dists > 0) & (dists < r)).sum()
# Updating K entry
K[k] += k_hold
# Compute the H form
# return np.sqrt((area*K) / (np.pi*N*(N-1))) - rd, rd
return np.sqrt((area*K) / (np.pi*N*N)) - rd, rd
# Edge compensation as Goreaud specifies [J. Vegetation Sci. 10: 433-438, 1999]
# cloud: cloud of points
# box: only points within this box are considered for k-function, the rest are only
# considered for edge correction
# n: number of output samples
# max_d: maximum distance
# Returns: Ripley's H form and the radius samples
def __ripley_goreaud(self, cloud, box, n, max_d):
# Initialization
pi_2 = 2 * np.pi
side_a = float(box[2] - box[0])
side_b = float(box[3] - box[1])
if (max_d > side_a) or (max_d > side_b):
error_msg = 'Ripley''s metric cannot be computed because max_d is greater than a cloud box dimension'
raise pexceptions.PySegInputError(expr='__ripley (SetClouds)', msg=error_msg)
area = side_a * side_b
rd = np.linspace(0, max_d, n)
N = float(cloud.shape[0])
K = np.zeros(shape=n, dtype=np.float)
if N <= 1:
return K, rd
# Cluster radius loop
for k, r in enumerate(rd):
if r == 0:
continue
# Points loop
for i in range(int(N)):
# Finding neighbours
hold = cloud[i] - cloud
dists = np.sqrt(np.sum(hold*hold, axis=1))
ids = np.where((dists > 0) & (dists < r))[0]
# Loop for neighbours
p = cloud[i, :]
weights = np.ones(shape=len(ids), dtype=np.float)
# Distance to edges
hold_dists = list()
hold_dists.append(box[2] - p[0])
hold_dists.append(p[1] - box[1])
hold_dists.append(p[0] - box[0])
hold_dists.append(box[3] - p[1])
hold_dists = np.asarray(hold_dists, dtype=np.float)
hold_dists = np.sqrt(hold_dists * hold_dists)
hold_dists = np.sort(hold_dists)
d1, d2, d3, d4 = hold_dists[0], hold_dists[1], hold_dists[2], hold_dists[3]
for j, idx in enumerate(ids):
# Compute distance to neighbour
pn = cloud[idx, :]
hold_r = p - pn
rj = math.sqrt((hold_r * hold_r).sum())
#### Edge compensation
# Switch for computing angle
if (rj > d1) and (rj <= d2) and (rj <= d3) and (rj <= d4):
alpha = 2 * math.acos(d1 / rj)
elif (rj > d1) and (rj > d2) and (rj <= d3) and (rj <= d4):
dh = d1*d1 + d2*d2
r2 = rj * rj
if r2 <= dh:
alpha = 2*math.acos(d1/rj) + 2*math.acos(d2/rj)
else:
alpha = .5*np.pi + math.acos(d1/r) + math.acos(d2/r)
elif (rj > d1) and (rj > d3) and (rj <= d2) and (rj <= d4):
alpha = 2*math.acos(d1/rj) + 2*math.acos(d3/rj)
elif (rj > d1) and (rj > d2) and (rj > d3) and (rj <= d4):
d12 = d1*d1 + d2*d2
d23 = d2*d2 + d3*d3
r2 = rj * rj
if (r2 <= d12) and (r2 <= d23):
alpha = 2*math.acos(d1/rj) + 2*math.acos(d2/rj) + 2*math.acos(d3/rj)
elif (r2 <= d12) and (r2 > d23):
alpha = .5*np.pi + 2*math.acos(d1/rj) + math.acos(d2/rj) + math.acos(d3/rj)
else:
alpha = np.pi + math.acos(d1/rj) + math.acos(d3/rj)
else:
alpha = .0
# Correcting factor
if alpha > pi_2:
weights[j] = 0.
else:
weights[j] = pi_2 / (pi_2 - alpha)
# Updating K entry
K[k] += (weights.sum())
# Compute the H form
# return np.sqrt((area*K) / (np.pi*N*(N-1))) - rd, rd
return np.sqrt((area*K) / (np.pi*N*N)) - rd, rd
# Computes G function for every slice
# n: number of samples for cdf
def __function_G(self, n):
# Generate random points
for i, cloud in enumerate(self.__clouds):
dists = nnde(cloud)
hold_g, hold_gx = compute_cdf(dists, n)
self.__g.append(hold_g)
self.__gx.append(hold_gx)
    #### Internal abstract functionality

    # Computes the F-Function for every slice (implemented by subclasses)
    # n: number of samples for cdf
    # m: number of simulations for cdf
    @abstractmethod
    def __function_F(self, n, m):
        raise NotImplementedError('__function_F() (SpA). '
                                  'Abstract method, it requires an implementation.')
    # Random (CSR) reference for the F-Function (implemented by subclasses)
    # n: number of samples for cdf
    # m: number of simulations for cdf
    # p: percentile for computing envelopes, if None (default) only median is computed
    # Returns: samples, F-Function, median and percentiles p and 100-p, if p is None the last two
    # are zero arrays
    @abstractmethod
    def __rand_function_F(self, n, m, p=None):
        raise NotImplementedError('__rand_function_F() (SpA). '
                                  'Abstract method, it requires an implementation.')
    # Random (CSR) reference for the G-Function (implemented by subclasses)
    # n: number of samples for cdf
    # m: number of simulations for cdf
    # p: percentile for computing envelopes, if None (default) only median is computed
    # Returns: samples, G-Function, median and percentiles p and 100-p, if p is None the last two
    # are zero arrays
    @abstractmethod
    def __rand_function_G(self, n, m, p=None):
        raise NotImplementedError('__rand_function_G() (SpA). '
                                  'Abstract method, it requires an implementation.')
###########################################################################################
# Class for doing a spatial analysis from several independent set of points in a slice
###########################################################################################

class SetCloudsP(SlA):

    # box: unique bounding box
    # n_samp: number of samples for graphs
    # n_sim_f: number of simulations for generating F and G functions
    # r_max: maximum distance for Ripley's H in nm
    # r_bord: if 0 (default) border compensation is not active, 1 points inflation mode and
    #         2 Goreaud
    # p_f: confidence percentile for F and G functions
    def __init__(self, box, n_samp, n_sim_f, r_max, r_bord, p_f):
        super(SetCloudsP, self).__init__(box, n_samp, n_sim_f, r_max, r_bord, p_f)

    #### External functionality area

    # cloud: array with point coordinates in a plane [n, 2]
    # sr: sample range [low, high]
    # cards: array with point cardinalities (optional)
    def insert_cloud(self, cloud, sr, cards=None):
        super(SetCloudsP, self).insert_cloud(cloud, sr)
        self._SlA__cards.append(cards)

    # Pickling the object state
    # fname: full path for the pickle file
    def pickle(self, fname):
        # BUGFIX: pickle emits bytes, so the file must be opened in binary mode ('w' breaks
        # under Python 3); the with-statement guarantees the handle is closed
        with open(fname, 'wb') as pkl_f:
            pickle.dump(self, pkl_f)

    #### Internal functionality area

    # Random (CSR) reference for the G-Function
    # n: number of samples for cdf
    # m: number of simulations
    # p: percentile for computing envelopes, if None (default) only the median is computed
    # Returns: samples, median and the p / 100-p envelopes (zero arrays when p is None)
    def _SlA__rand_function_G(self, n, m, p=None):
        dists = list()
        # np.float was removed from NumPy (1.24); the builtin float is equivalent here
        cdfs = np.zeros(shape=(n, m*len(self._SlA__clouds)), dtype=float)
        cont = 0
        # Random simulation: one CDF per (simulation, cloud) pair
        for i in range(m):
            for j, cloud in enumerate(self._SlA__clouds):
                rand_dists = nnde(gen_rand_cloud(cloud.shape[0], self._SlA__box))
                cdfs[:, cont], _ = compute_cdf(rand_dists, n)
                cont += 1
        # Real data (only the sample abscissas of this CDF are used below)
        for cloud in self._SlA__clouds:
            hold_dists = nnde(cloud)
            dists += hold_dists.tolist()
        dists = np.asarray(dists, dtype=float)
        # Compute results ('samp' avoids shadowing the scipy alias 'sp')
        _gf, samp = compute_cdf(dists, n)
        env_05 = func_envelope(cdfs, per=50)
        if p is None:
            return samp, env_05, \
                   np.zeros(shape=len(samp), dtype=float), np.zeros(shape=len(samp), dtype=float)
        else:
            env_1 = func_envelope(cdfs, per=p)
            env_2 = func_envelope(cdfs, per=100-p)
            return samp, env_05, env_1, env_2

    # Random (CSR) reference for the F-Function
    # n: number of samples for cdf
    # m: number of simulations
    # p: percentile for computing envelopes, if None (default) only the median is computed
    # Returns: samples, median and the p / 100-p envelopes (zero arrays when p is None)
    def _SlA__rand_function_F(self, n, m, p=None):
        dists = list()
        cdfs = np.zeros(shape=(n, m*len(self._SlA__clouds)), dtype=float)
        cont = 0
        # Random simulation and real data
        for i in range(m):
            for j, cloud in enumerate(self._SlA__clouds):
                cloud_1 = gen_rand_cloud(cloud.shape[0], self._SlA__box)
                cloud_2 = gen_rand_cloud(cloud.shape[0], self._SlA__box)
                rand_dists = cnnde(cloud_1, cloud_2)
                cdfs[:, cont], _ = compute_cdf(rand_dists, n)
                hold_dists = cnnde(cloud_1, cloud)
                dists += hold_dists.tolist()
                cont += 1
        dists = np.asarray(dists, dtype=float)
        # Compute results ('samp' avoids shadowing the scipy alias 'sp')
        _gf, samp = compute_cdf(dists, n)
        env_05 = func_envelope(cdfs, per=50)
        if p is None:
            return samp, env_05, \
                   np.zeros(shape=len(samp), dtype=float), np.zeros(shape=len(samp), dtype=float)
        else:
            env_1 = func_envelope(cdfs, per=p)
            env_2 = func_envelope(cdfs, per=100-p)
            return samp, env_05, env_1, env_2

    # Computes F function for every slice
    # n: number of samples for cdf
    # m: number of random simulations
    def _SlA__function_F(self, n, m):
        # Distances from random reference points to the nearest cloud point
        for cloud in self._SlA__clouds:
            dists = list()
            for _ in range(m):
                dists += cnnde(cloud, gen_rand_cloud(cloud.shape[0], self._SlA__box)).tolist()
            dists = np.asarray(dists, dtype=float)
            hold_f, hold_fx = compute_cdf(dists, n)
            self._SlA__f.append(hold_f)
            self._SlA__fx.append(hold_fx)
###########################################################################################
# Class for doing a spatial analysis from cluster of points in a slice
###########################################################################################
class SetClustersP(SlA):
# box: unique bounding box
# n_samp: number of samples for graphs
# n_sim_f: number of simulations for generating F and G functions
# r_max: maximum distance for Ripley's H in nm
# r_bord: if 0 (default) border compensation is not active, 1 points inflation mode and
# 2 Goreaud
# p_f: confidence percentile for F and G functions
# r_t: number of tries for random clusters generation
    # See SlA constructor for box, n_samp, n_sim_f, r_max, r_bord and p_f
    # r_t: number of tries for random clusters generation
    def __init__(self, box, n_samp, n_sim_f, r_max, r_bord, p_f, r_t=50):
        super(SetClustersP, self).__init__(box, n_samp, n_sim_f, r_max, r_bord, p_f)
        # Per-slice lists of input clusters and validity masks (parallel to the clouds)
        self.__clsts_l = list()
        self.__masks_l = list()
        # Number of placement tries when generating random cluster distributions
        self.__r_t = r_t
    #### External functionality area

    # cloud_cg: array with point coordinates of clusters centers of gravity in a plane [n, 2]
    # sr: sample range [low, high]
    # clsts: ordered list with clusters, each cluster is an array of points
    # mask: mask where False-values mark invalid regions
    def insert_cloud(self, cloud_cg, sr, clsts, mask):
        super(SetClustersP, self).insert_cloud(cloud_cg, sr)
        # Keep the raw clusters and their validity mask for random re-distribution
        self.__clsts_l.append(clsts)
        self.__masks_l.append(mask)
# Pickling the object state
# fname: full path for the pickle file
def pickle(self, fname):
pkl_f = open(fname, 'w')
try:
pickle.dump(self, pkl_f)
finally:
pkl_f.close()
#### Internal functionality area
# Generates a random distribution of the internal clusters
# clsts: list of clusters
# box: bounding box
# mask: binary mask where False valued regions are invalids
# tries: number of tries for getting the less overlapped location for every cluster
# Returns: an array with new centroids
def __get_rand_clsts(self, clsts, box, mask):
# Initialization
n_cgs = np.zeros(shape=(len(clsts), 2), dtype=np.float)
# Loop for clusters
mask_h = np.copy(mask)
for i, c_cloud in enumerate(clsts):
# Translate to base coordinates and computes minimum distance to center of gravity
cg = c_cloud.mean(axis=0)
f_cloud = c_cloud - cg
# Compute valid search areas
dst_t = sp.ndimage.morphology.distance_transform_edt(mask_h)
mask_dst = np.zeros(shape=mask_h.shape, dtype=mask_h.dtype)
mask_dst[dst_t > 0] = True
if (dst_t > 0).sum() <= 0:
error_msg = 'Mask fully overlapped.'
raise pexceptions.PySegTransitionError(expr='__get_rand_clsts (SetClustersP)',
msg=error_msg)
# Keep the best try (lower overlapping)
min_ov = MAX_FLOAT
h_cg = None
h_chull = np.zeros(shape=mask_h.shape, dtype=mask_h.dtype)
for c_try in range(self.__r_t):
# Random selection for the new centroid from valid areas
m_ids = np.where(mask_dst)
r_x, r_y = np.random.randint(0, len(m_ids[0])), np.random.randint(0, len(m_ids[1]))
cg_x, cg_y = m_ids[0][r_x], m_ids[1][r_y]
# Rotate randomly against base center [0, 0]
rho = np.random.rand() * (2*np.pi)
sinr, cosr = math.sin(rho), math.cos(rho)
r_cloud = np.zeros(shape=f_cloud.shape, dtype=f_cloud.dtype)
r_cloud[:, 0] = f_cloud[:, 0]*cosr - f_cloud[:, 1]*sinr
r_cloud[:, 1] = f_cloud[:, 0]*sinr + f_cloud[:, 1]*cosr
# Translation to randomly already selected center
n_cg = np.asarray((cg_x, cg_y) , dtype=np.float)
# v = n_cg - cg
t_cloud = r_cloud + n_cg
chull, _ = self.__compute_chull_no_bound(t_cloud, box)
# Update minimum overlap
ov = chull.sum() - (chull * mask_h).sum()
if ov < min_ov:
min_ov = ov
h_cg = n_cg
h_chull = chull
if ov == 0:
break
else:
if h_cg is None:
h_cg = n_cg
# Update mask
mask_h[h_chull] = False
# Get new center transposed
n_cgs[i, 0] = h_cg[1]
n_cgs[i, 1] = h_cg[0]
return n_cgs
# Returns convex hull and discard points out of bounds are discarded and no exception is
# raised, instead in a second variable a true is returned
def __compute_chull_no_bound(self, c_cloud, box):
# Create holding image
off_x = math.floor(box[1])
off_y = math.floor(box[0])
m, n = math.ceil(box[3]) - off_x + 1, math.ceil(box[2]) - off_y + 1
img = np.zeros(shape=(m, n), dtype=np.bool)
# Filling holding image
hold = np.asarray(np.round(c_cloud), dtype=np.int)
hold[:, 0] -= off_y
hold[:, 1] -= off_x
excep = False
p_count = 0
for p in hold:
try:
img[p[0], p[1]] = True
except IndexError:
excep = True
continue
p_count += 1
# Computing the convex hull
if p_count > 0:
chull = np.asarray(convex_hull_image(img), dtype=np.bool)
else:
chull = img
return chull, excep
def _SlA__rand_function_G(self, n, m, p=None):
# Generate random points
dists = list()
cdfs = np.zeros(shape=(n, m*len(self.__clsts_l)), dtype=np.float)
cont = 0
for i in range(m):
for j, clsts in enumerate(self.__clsts_l):
hold_dists = nnde(self.__get_rand_clsts(clsts, self._SlA__box,
self.__masks_l[j]))
cdfs[:, cont], _ = compute_cdf(hold_dists, n)
dists += hold_dists.tolist()
cont += 1
dists = np.asarray(dists, dtype=np.float)
# Compute results
gf, sp = compute_cdf(dists, n)
env_05 = func_envelope(cdfs, per=50)
if p is None:
return sp, env_05, \
np.zeros(shape=len(sp), dtype=np.float), np.zeros(shape=len(sp), dtype=np.float)
else:
env_1 = func_envelope(cdfs, per=p)
env_2 = func_envelope(cdfs, per=100-p)
return sp, env_05, env_1, env_2
def _SlA__rand_function_F(self, n, m, p=None):
# Generate random points
dists = list()
cdfs = np.zeros(shape=(n, m*len(self.__clsts_l)), dtype=np.float)
cont = 0
for i in range(m):
for j, clsts in enumerate(self.__clsts_l):
cloud_1 = self.__get_rand_clsts(clsts, self._SlA__box, self.__masks_l[j])
cloud_2 = self.__get_rand_clsts(clsts, self._SlA__box, self.__masks_l[j])
hold_dists = cnnde(cloud_1, cloud_2)
cdfs[:, cont], _ = compute_cdf(hold_dists, n)
dists += hold_dists.tolist()
cont += 1
dists = np.asarray(dists, dtype=np.float)
# Compute results
gf, sp = compute_cdf(dists, n)
env_05 = func_envelope(cdfs, per=50)
if p is None:
return sp, env_05, \
np.zeros(shape=len(sp), dtype=np.float), np.zeros(shape=len(sp), dtype=np.float)
else:
env_1 = func_envelope(cdfs, per=p)
env_2 = func_envelope(cdfs, per=100-p)
return sp, env_05, env_1, env_2
# Computes F function for every slice
# n: number of samples for cdf
# m: number of random simulations
def _SlA__function_F(self, n, m):
# Generate random points
for i, cloud in enumerate(self._SlA__clouds):
dists = list()
for j in range(m):
dists += cnnde(cloud, self.__get_rand_clsts(self.__clsts_l[i],
self._SlA__box,
self.__masks_l[i])).tolist()
dists = np.asarray(dists, dtype=np.float)
hold_f, hold_fx = compute_cdf(dists, n)
self._SlA__f.append(hold_f)
self._SlA__fx.append(hold_fx)
###########################################################################################
# Class for doing a spatial analysis from a pair of point clouds
# VERY IMPORTANT: only valid for 2D data
###########################################################################################
class PairClouds(object):
    """Spatial analysis for a pair of 2D point clouds: classifies the enclosing euclidean
    space (bounding box) into side A / side B with brute-force kNN.

    VERY IMPORTANT: only valid for 2D data."""

    # cloud_a/b: the pair of clouds (2D arrays [n, 2])
    # box: bounding box [x_min, y_min, x_max, y_max] of the enclosing euclidean space
    def __init__(self, cloud_a, cloud_b, box):
        self.__cloud_a = cloud_a
        self.__cloud_b = cloud_b
        self.__box = box
        # For image indexing: offsets and image dimensions derived from the box
        self.__ox = int(math.floor(self.__box[1]))
        self.__oy = int(math.floor(self.__box[0]))
        self.__m = int(math.ceil(self.__box[3]) - self.__ox + 1)
        self.__n = int(math.ceil(self.__box[2]) - self.__oy + 1)

    #### Get/Set methods

    #### External functionality area

    # Classifies the euclidean space according kNN classifier (Brute Force)
    # k: number of neighbours (it should be odd so majority voting cannot tie)
    # mask: if not None (default), image with bounding box dimensions where 0 values
    #       set the background
    # Return: image with box dimensions, pixel value: 1 side A, 2 side B and 0 bg
    def knn(self, k, mask=None):
        # Initialization
        img = np.zeros(shape=(self.__m, self.__n), dtype=np.uint8)
        cloud_a = np.asarray(np.round(self.__cloud_a), dtype=int)
        cloud_b = np.asarray(np.round(self.__cloud_b), dtype=int)
        cloud = np.concatenate((cloud_a, cloud_b), axis=0)
        cloud[:, 0] -= self.__oy
        cloud[:, 1] -= self.__ox
        # Rows below 'border' belong to cloud A, the rest to cloud B
        border = cloud_a.shape[0]
        # Applying kNN criteria
        for y in range(self.__m):
            for x in range(self.__n):
                # Pixel squared distances to all cloud points
                pix = np.asarray((x, y))
                hold = pix - cloud
                # Getting the k-neighbours (indices of the k smallest squared distances)
                idx = np.argsort(np.sum(hold*hold, axis=1))[0:k]
                # kNN discriminant: majority voting between both sides
                s_a = np.sum(idx < border)
                s_b = len(idx) - s_a
                if s_a > s_b:
                    img[y, x] = 1
                else:
                    img[y, x] = 2
        # Masking
        if mask is not None:
            img[mask == 0] = 0
        return img

    # Customized kNN classifier
    # k: number of neighbours
    # mask: if not None (default), image with bounding box dimensions where 0 values
    #       set the background
    # max_dist: if the closest neighbour is farther than it, the pixel is considered bg;
    #           default None, which means MAX_FLOAT (no limit). The distance is measured
    #           in pixels.
    # Return: image with box dimensions, pixel value: 1 side A, 2 side B, 3 mix and 0 bg
    def knnc(self, k, mask=None, max_dist=None):
        # Resolve the distance limit at call time
        if max_dist is None:
            max_dist = MAX_FLOAT
        # Initialization
        img = np.zeros(shape=(self.__m, self.__n), dtype=np.uint8)
        cloud_a = np.asarray(np.round(self.__cloud_a), dtype=int)
        cloud_b = np.asarray(np.round(self.__cloud_b), dtype=int)
        cloud = np.concatenate((cloud_a, cloud_b), axis=0)
        cloud[:, 0] -= self.__oy
        cloud[:, 1] -= self.__ox
        border = cloud_a.shape[0]
        # Applying kNN criteria
        for y in range(self.__m):
            for x in range(self.__n):
                # Pixel squared distances to all cloud points
                pix = np.asarray((x, y))
                hold = pix - cloud
                # Getting the k-neighbours
                dists = np.sum(hold*hold, axis=1)
                idx = np.argsort(dists)[0:k]
                f_neigh = math.sqrt(dists[idx[0]])
                # kNN discriminant: unanimous A -> 1, unanimous B -> 2, otherwise mix -> 3
                if f_neigh < max_dist:
                    lidx = len(idx)
                    s_a = np.sum(idx < border)
                    s_b = lidx - s_a
                    if s_a == lidx:
                        img[y, x] = 1
                    elif s_b == lidx:
                        img[y, x] = 2
                    else:
                        img[y, x] = 3
        # Masking
        if mask is not None:
            img[mask == 0] = 0
        return img
###########################################################################################
# Class for doing a spatial analysis from pairs of independent set of points in a slice
###########################################################################################
class SetPairClouds(object):
    """Spatial analysis for pairs of independent 2D point clouds in a slice: crossed
    G-function, crossed Ripley's H and complemented (union) Ripley's H, all with Goreaud
    edge compensation."""

    # box_a, box_b: 2D boxes of every element of the pair, they should overlap
    # n_samp: number of samples for graphs
    # n_sim_f: number of simulations for generating G function
    # r_max: maximum distance for Ripley's H in nm
    # p_f: confidence percentile for F and G functions
    # fwd: if True (default) forward cross Ripley's H is computed, backward if False
    def __init__(self, box_a, box_b, n_samp, n_sim_f, r_max, p_f, fwd=True):
        self.__n = n_samp
        self.__nsim_f = n_sim_f
        self.__r_max = r_max
        self.__r_bord = 0
        self.__p_f = p_f
        self.__clouds_a = list()
        self.__clouds_b = list()
        self.__srs = list()
        self.__g = list()
        self.__gx = list()
        self.__grm = np.zeros(shape=n_samp, dtype=float)
        self.__grm1 = np.zeros(shape=n_samp, dtype=float)
        self.__grm2 = np.zeros(shape=n_samp, dtype=float)
        self.__grmx = np.zeros(shape=n_samp, dtype=float)
        self.__hcr = list()
        self.__hcp = list()
        self.__hx = list()
        self.__box = merge_boxes_2D(box_a, box_b)
        # Side lengths of the merged box [x_min, y_min, x_max, y_max]: Ripley's H only makes
        # sense when the maximum radius fits within the overlapped box (multiplying the
        # coordinates, as done before, is meaningless and always 0 for boxes at the origin)
        l1 = self.__box[2] - self.__box[0]
        l2 = self.__box[3] - self.__box[1]
        if (l1 < self.__r_max) or (l2 < self.__r_max):
            error_msg = 'Ripley\'s H range bigger than overlapped box.'
            raise pexceptions.PySegInputWarning(expr='__init__ (SetPairClouds)',
                                                msg=error_msg)
        # Low pass filter for differentials
        b, a = butter(5, .3, btype='low', analog=False)
        self.__lpf = (b, a)
        self.__fwd = fwd

    ###### Set/Get functionality

    def get_slice_ranges(self):
        return self.__srs

    def get_cross_ripley_H(self):
        return self.__hcr, self.__hx

    def get_comp_ripley_H(self):
        return self.__hcp, self.__hx

    def get_cross_G(self):
        return self.__g, self.__gx

    ###### External functionality area

    # Inserts a new pair of clouds with its sample range sr
    def insert_pair(self, cloud_a, cloud_b, sr):
        self.__clouds_a.append(cloud_a)
        self.__clouds_b.append(cloud_b)
        self.__srs.append(sr)

    # Computes crossed G-Function and Ripley's H, and complemented Ripley's H
    def analyze(self, verbose=False):
        if verbose:
            sys.stdout.write('Progress: 0% ... ')
        # G-Function
        self.__function_cross_G(self.__n)
        if verbose:
            sys.stdout.write('20% ... ')
        if self.__nsim_f > 0:
            self.__grmx, self.__grm, self.__grm1, self.__grm2 = \
                self.__rand_function_cross_G(self.__n, self.__nsim_f, self.__p_f)
        if verbose:
            sys.stdout.write('40% ... ')
        self.__ripleys_H_cross(self.__n, self.__r_max)
        if verbose:
            sys.stdout.write('80% ... ')
        self.__ripleys_H_comp(self.__n, self.__r_max)
        # Ripley's crossed metrics
        if verbose:
            print('100%')

    # Plot into figures the current analysis state
    # block: if True (default False) waits for closing windows for finishing the execution
    # cloud_over: if True (default) all clouds are plot in the same figure
    # r_stat: if True (default False) Ripley's H statistics are plotted
    def plot(self, block=False, cloud_over=True, r_stat=False):
        # Initialization
        fig_count = 0
        if block:
            plt.ion()
        labels = self.__srs
        color = cm.rainbow(np.linspace(0, 1, len(self.__srs)))
        # Plot clouds
        if cloud_over:
            fig_count += 1
            plt.figure(fig_count)
            plt.title('Clouds of points')
            plt.xlabel('X (nm)')
            plt.ylabel('Y (nm)')
            plt.axis('scaled')
            plt.xlim(self.__box[0], self.__box[2])
            plt.ylim(self.__box[1], self.__box[3])
            for i, cloud_a in enumerate(self.__clouds_a):
                if cloud_a.shape[0] > 0:
                    plt.scatter(cloud_a[:, 0], cloud_a[:, 1], c=color[i], marker='.')
                if self.__clouds_b[i].shape[0] > 0:
                    plt.scatter(self.__clouds_b[i][:, 0], self.__clouds_b[i][:, 1], c=color[i], marker='x')
        else:
            for i, cloud_a in enumerate(self.__clouds_a):
                fig_count += 1
                plt.figure(fig_count)
                plt.title('Clouds of points ' + labels[i])
                plt.xlabel('X (nm)')
                plt.ylabel('Y (nm)')
                plt.axis('scaled')
                plt.xlim(self.__box[0], self.__box[2])
                plt.ylim(self.__box[1], self.__box[3])
                if cloud_a.shape[0] > 0:
                    plt.scatter(cloud_a[:, 0], cloud_a[:, 1], marker='.')
                if self.__clouds_b[i].shape[0] > 0:
                    plt.scatter(self.__clouds_b[i][:, 0], self.__clouds_b[i][:, 1], marker='x')
        # Plot crossed G-Function
        fig_count += 1
        plt.figure(fig_count)
        plt.title('Crossed G-Function')
        plt.xlabel('Distance (nm)')
        plt.ylabel('G')
        plt.ylim(0, 1)
        if self.__nsim_f > 0:
            plt.plot(self.__grmx, self.__grm, 'k')
            if self.__p_f is not None:
                plt.plot(self.__grmx, self.__grm1, 'k--')
                plt.plot(self.__grmx, self.__grm2, 'k--')
        lines = list()
        for (g, gx, c, lbl) in zip(self.__g, self.__gx, color, labels):
            line, = plt.plot(gx, g, c=c, label=lbl)
            lines.append(line)
        if len(lines) > 0:
            plt.legend(handles=lines)
        # Plot crossed Ripley's H
        fig_count += 1
        plt.figure(fig_count)
        plt.title('Crossed Ripley H')
        plt.xlabel('Radius (nm)')
        plt.ylabel('H')
        for (h, hx, c, lbl) in zip(self.__hcr, self.__hx, color, labels):
            plt.plot(hx, h, c=c, label=lbl)
        if len(self.__hcr) > 0:
            plt.plot(self.__hx[0], np.zeros(shape=len(self.__hcr[0])), 'k--')
        # Plot complemented Ripley's H
        fig_count += 1
        plt.figure(fig_count)
        plt.title('Complemented Ripley H')
        plt.xlabel('Radius (nm)')
        plt.ylabel('H')
        for (h, hx, c, lbl) in zip(self.__hcp, self.__hx, color, labels):
            plt.plot(hx, h, c=c, label=lbl)
        if len(self.__hcp) > 0:
            plt.plot(self.__hx[0], np.zeros(shape=len(self.__hcp[0])), 'k--')
        # Plot Ripley's H statistics, measured over the crossed curves (the old internal
        # list self.__h was never filled, so statistics always came out empty)
        if r_stat:
            # Compute stats
            maxs = list()
            medians = list()
            stds = list()
            for h in self.__hcr:
                maxs.append(h.max())
                medians.append(np.median(h))
                stds.append(h.std())
            nsam = np.arange(len(maxs))
            # Plotting
            fig_count += 1
            plt.figure(fig_count)
            plt.title('Ripley\'s H maximum')
            plt.xlabel('Sample')
            plt.ylabel('H maximum')
            plt.xlim(nsam[0]-1, nsam[-1]+1)
            plt.stem(nsam, np.asarray(maxs, dtype=float))
            fig_count += 1
            plt.figure(fig_count)
            plt.title('Ripley\'s H medians')
            plt.xlabel('Sample')
            plt.ylabel('H medians')
            plt.xlim(nsam[0]-1, nsam[-1]+1)
            plt.stem(nsam, np.asarray(medians, dtype=float))
            fig_count += 1
            plt.figure(fig_count)
            plt.title('Ripley\'s H standard deviations')
            plt.xlabel('Sample')
            plt.ylabel('H deviations')
            plt.xlim(nsam[0]-1, nsam[-1]+1)
            plt.stem(nsam, np.asarray(stds, dtype=float))
        # Show
        plt.show(block=block)

    # Store into figure files the current analysis state
    # path: path to the folder where figures will be stored
    # cloud_over: if True (default) all clouds are stored in the same figure
    def store_figs(self, path, cloud_over=True):
        # Initialization
        fig_count = 0
        labels = self.__srs
        color = cm.rainbow(np.linspace(0, 1, len(self.__srs)))
        # Plot clouds
        if cloud_over:
            fig_count += 1
            plt.figure(fig_count)
            plt.title('Clouds of points')
            plt.xlabel('X (nm)')
            plt.ylabel('Y (nm)')
            plt.axis('scaled')
            plt.xlim(self.__box[0], self.__box[2])
            plt.ylim(self.__box[1], self.__box[3])
            for i, cloud_a in enumerate(self.__clouds_a):
                if cloud_a.shape[0] > 0:
                    plt.scatter(cloud_a[:, 0], cloud_a[:, 1], c=color[i], marker='.')
                if self.__clouds_b[i].shape[0] > 0:
                    plt.scatter(self.__clouds_b[i][:, 0], self.__clouds_b[i][:, 1], c=color[i], marker='x')
            plt.savefig(path + '/clouds.png')
            plt.close()
        else:
            for i, cloud_a in enumerate(self.__clouds_a):
                fig_count += 1
                plt.figure(fig_count)
                plt.title('Clouds of points ' + labels[i])
                plt.xlabel('X (nm)')
                plt.ylabel('Y (nm)')
                plt.axis('scaled')
                plt.xlim(self.__box[0], self.__box[2])
                plt.ylim(self.__box[1], self.__box[3])
                if cloud_a.shape[0] > 0:
                    plt.scatter(cloud_a[:, 0], cloud_a[:, 1], marker='.')
                if self.__clouds_b[i].shape[0] > 0:
                    plt.scatter(self.__clouds_b[i][:, 0], self.__clouds_b[i][:, 1], marker='x')
                plt.savefig(path + '/cloud_' + labels[i] + '.png')
                plt.close()
        # Plot crossed G-Function
        fig_count += 1
        plt.figure(fig_count)
        plt.title('Crossed G-Function')
        plt.xlabel('Distance (nm)')
        plt.ylabel('G')
        plt.ylim(0, 1)
        if self.__nsim_f > 0:
            plt.plot(self.__grmx, self.__grm, 'k')
            if self.__p_f is not None:
                plt.plot(self.__grmx, self.__grm1, 'k--')
                plt.plot(self.__grmx, self.__grm2, 'k--')
        lines = list()
        for (g, gx, c, lbl) in zip(self.__g, self.__gx, color, labels):
            line, = plt.plot(gx, g, c=c, label=lbl)
            lines.append(line)
        if len(lines) > 0:
            plt.legend(handles=lines)
        plt.savefig(path + '/g_cr.png')
        plt.close()
        # Plot crossed Ripley's H
        fig_count += 1
        plt.figure(fig_count)
        plt.title('Crossed Ripley H')
        plt.xlabel('Radius (nm)')
        plt.ylabel('H')
        for (h, hx, c, lbl) in zip(self.__hcr, self.__hx, color, labels):
            plt.plot(hx, h, c=c, label=lbl)
        if len(self.__hcr) > 0:
            plt.plot(self.__hx[0], np.zeros(shape=len(self.__hcr[0])), 'k--')
        plt.savefig(path + '/h_cr.png')
        plt.close()
        # Plot complemented Ripley's H
        fig_count += 1
        plt.figure(fig_count)
        plt.title('Complemented Ripley H')
        plt.xlabel('Radius (nm)')
        plt.ylabel('H')
        for (h, hx, c, lbl) in zip(self.__hcp, self.__hx, color, labels):
            plt.plot(hx, h, c=c, label=lbl)
        if len(self.__hcp) > 0:
            plt.plot(self.__hx[0], np.zeros(shape=len(self.__hcp[0])), 'k--')
        plt.savefig(path + '/h_cp.png')
        plt.close()

    # Pickling the object state
    # fname: full path for the pickle file
    def pickle(self, fname):
        # Pickle streams are binary: 'wb' is required under Python 3
        pkl_f = open(fname, 'wb')
        try:
            pickle.dump(self, pkl_f)
        finally:
            pkl_f.close()

    ##### Internal functionality area

    # Computes crossed G function for every pair of slices
    # n: number of samples for cdf
    def __function_cross_G(self, n):
        for cloud_a, cloud_b in zip(self.__clouds_a, self.__clouds_b):
            dists = cnnde(cloud_b, cloud_a)
            hold_g, hold_gx = compute_cdf(dists, n)
            self.__g.append(hold_g)
            self.__gx.append(hold_gx)

    # Random reference for the crossed G function
    # Returns: samples, 50% envelope and the p / (100-p) percentile envelopes
    def __rand_function_cross_G(self, n, m, p=None):
        # Generate random points
        dists = list()
        cdfs = np.zeros(shape=(n, m*len(self.__clouds_b)), dtype=float)
        cont = 0
        for i in range(m):
            for j, cloud_b in enumerate(self.__clouds_b):
                rand_cloud_b = gen_rand_cloud(cloud_b.shape[0], self.__box)
                rand_cloud_a = gen_rand_cloud(self.__clouds_a[j].shape[0], self.__box)
                # Mean reference: random B against the real A; envelopes: fully random pair
                hold_dists = cnnde(rand_cloud_b, self.__clouds_a[j])
                rand_dists = cnnde(rand_cloud_b, rand_cloud_a)
                cdfs[:, cont], _ = compute_cdf(rand_dists, n)
                dists += hold_dists.tolist()
                cont += 1
        dists = np.asarray(dists, dtype=float)
        # Compute results
        gf, sp = compute_cdf(dists, n)
        env_05 = func_envelope(cdfs, per=50)
        if p is None:
            return sp, env_05, \
                   np.zeros(shape=len(sp), dtype=float), np.zeros(shape=len(sp), dtype=float)
        else:
            env_1 = func_envelope(cdfs, per=p)
            env_2 = func_envelope(cdfs, per=100-p)
            return sp, env_05, env_1, env_2

    # Computes Ripley's crossed function in H and updates the correspondent lists, only
    # Goreaud border compensation is allowed
    # n: number of samples
    # max_d: max distance for being considered
    def __ripleys_H_cross(self, n, max_d):
        # Initialization
        self.__h = list()
        self.__hx = list()
        box = self.__box
        # Ripleys H computation (direction controlled by fwd)
        for cloud_a, cloud_b in zip(self.__clouds_a, self.__clouds_b):
            if self.__fwd:
                hold_h, hold_x = self.__ripley_cross_goreaud(cloud_a, cloud_b, box, n, max_d)
            else:
                hold_h, hold_x = self.__ripley_cross_goreaud(cloud_b, cloud_a, box, n, max_d)
            # Low pass filtering
            self.__hcr.append(lfilter(self.__lpf[0], self.__lpf[1], hold_h))
            self.__hx.append(hold_x)

    # Computes Ripley's function in H of the union of the pairs
    # n: number of samples
    # max_d: max distance for being considered
    # border: kept for backward compatibility (unused)
    def __ripleys_H_comp(self, n, max_d, border=0):
        # Initialization
        self.__h = list()
        self.__hx = list()
        box = self.__box
        # Ripleys H computation on the union of both clouds
        for cloud_a, cloud_b in zip(self.__clouds_a, self.__clouds_b):
            hold_h, hold_x = self.__ripley_goreaud(np.concatenate((cloud_a, cloud_b), axis=0), box, n, max_d)
            # Low pass filtering
            self.__hcp.append(lfilter(self.__lpf[0], self.__lpf[1], hold_h))
            self.__hx.append(hold_x)

    # Goreaud edge compensation angle [J. Vegetation Sci. 10: 433-438, 1999]: the angle
    # (radians) of the circle of radius rj, centred at a point whose sorted unsigned
    # distances to the four box edges are d1 <= d2 <= d3 <= d4, lying outside the box.
    # Shared by __ripley_goreaud and __ripley_cross_goreaud.
    def __goreaud_alpha(self, rj, d1, d2, d3, d4):
        if (rj > d1) and (rj <= d2) and (rj <= d3) and (rj <= d4):
            # Circle crosses one edge
            return 2 * math.acos(d1 / rj)
        elif (rj > d1) and (rj > d2) and (rj <= d3) and (rj <= d4):
            # Circle crosses the two nearest edges: corner in/out decides the formula
            if (rj * rj) <= (d1*d1 + d2*d2):
                return 2*math.acos(d1/rj) + 2*math.acos(d2/rj)
            else:
                # NOTE: previously used the loop radius r instead of the pair distance rj,
                # inconsistently with every other branch
                return .5*np.pi + math.acos(d1/rj) + math.acos(d2/rj)
        elif (rj > d1) and (rj > d3) and (rj <= d2) and (rj <= d4):
            return 2*math.acos(d1/rj) + 2*math.acos(d3/rj)
        elif (rj > d1) and (rj > d2) and (rj > d3) and (rj <= d4):
            d12 = d1*d1 + d2*d2
            d23 = d2*d2 + d3*d3
            r2 = rj * rj
            if (r2 <= d12) and (r2 <= d23):
                return 2*math.acos(d1/rj) + 2*math.acos(d2/rj) + 2*math.acos(d3/rj)
            elif (r2 <= d12) and (r2 > d23):
                return .5*np.pi + 2*math.acos(d1/rj) + math.acos(d2/rj) + math.acos(d3/rj)
            else:
                return np.pi + math.acos(d1/rj) + math.acos(d3/rj)
        return .0

    # Ripley's H with edge compensation as Goreaud specifies
    # [J. Vegetation Sci. 10: 433-438, 1999]
    # cloud: cloud of points
    # box: only points within this box are considered for k-function, the rest are only
    #      considered for edge correction
    # n: number of output samples
    # max_d: maximum distance
    # Returns: Ripley's H form and the radius samples
    def __ripley_goreaud(self, cloud, box, n, max_d):
        # Initialization
        pi_2 = 2 * np.pi
        side_a = float(box[2] - box[0])
        side_b = float(box[3] - box[1])
        if (max_d > side_a) or (max_d > side_b):
            error_msg = 'Ripley''s metric cannot be computed because max_d is greater than a cloud box dimension'
            raise pexceptions.PySegInputError(expr='__ripley (SetClouds)', msg=error_msg)
        area = side_a * side_b
        rd = np.linspace(0, max_d, n)
        N = float(cloud.shape[0])
        K = np.zeros(shape=n, dtype=float)
        if N <= 1:
            return K, rd
        # Cluster radius loop
        for k, r in enumerate(rd):
            if r == 0:
                continue
            # Points loop
            for i in range(int(N)):
                # Finding neighbours strictly closer than r
                hold = cloud[i] - cloud
                dists = np.sqrt(np.sum(hold*hold, axis=1))
                ids = np.where((dists > 0) & (dists < r))[0]
                p = cloud[i, :]
                weights = np.ones(shape=len(ids), dtype=float)
                # Sorted unsigned distances from p to the four box edges
                hold_dists = list()
                hold_dists.append(box[2] - p[0])
                hold_dists.append(p[1] - box[1])
                hold_dists.append(p[0] - box[0])
                hold_dists.append(box[3] - p[1])
                hold_dists = np.asarray(hold_dists, dtype=float)
                hold_dists = np.sqrt(hold_dists * hold_dists)
                hold_dists = np.sort(hold_dists)
                d1, d2, d3, d4 = hold_dists[0], hold_dists[1], hold_dists[2], hold_dists[3]
                # Loop for neighbours
                for j, idx in enumerate(ids):
                    # Compute distance to neighbour
                    pn = cloud[idx, :]
                    hold_r = p - pn
                    rj = math.sqrt((hold_r * hold_r).sum())
                    # Edge compensation: weight by the visible fraction of the rj-circle
                    alpha = self.__goreaud_alpha(rj, d1, d2, d3, d4)
                    if alpha > pi_2:
                        weights[j] = 0.
                    else:
                        weights[j] = pi_2 / (pi_2 - alpha)
                # Updating K entry
                K[k] += (weights.sum())
        # Compute the H form (biased N*N estimator kept, the unbiased alternative would
        # use N*(N-1) in the denominator)
        return np.sqrt((area*K) / (np.pi*N*N)) - rd, rd

    # Crossed Ripley's H form computation with edge compensation as Goreaud specifies
    # [J. Vegetation Sci. 10: 433-438, 1999]
    # cloud_a: cloud of points for taking the measures
    # cloud_b: cloud of points working as neighbours
    # box: only points within this box are considered for k-function, the rest are only
    #      considered for edge correction
    # n: number of output samples
    # max_d: maximum distance
    # Returns: Ripley's H form and the radius samples
    def __ripley_cross_goreaud(self, cloud_a, cloud_b, box, n, max_d):
        # Initialization
        pi_2 = 2 * np.pi
        side_a = float(box[2] - box[0])
        side_b = float(box[3] - box[1])
        if (max_d > side_a) or (max_d > side_b):
            error_msg = 'Ripley''s metric cannot be computed because max_d is greater than a cloud box dimension'
            raise pexceptions.PySegInputError(expr='__ripley_cross_goreaud (PairSetClouds)', msg=error_msg)
        area = side_a * side_b
        rd = np.linspace(0, max_d, n)
        N = float(cloud_a.shape[0])
        K = np.zeros(shape=n, dtype=float)
        if N <= 1:
            return K, rd
        # Cluster radius loop
        for k, r in enumerate(rd):
            if r == 0:
                continue
            # Points loop
            for i in range(int(N)):
                # Finding neighbours of a-point i within cloud b
                hold = cloud_a[i] - cloud_b
                dists = np.sqrt(np.sum(hold*hold, axis=1))
                ids = np.where((dists > 0) & (dists < r))[0]
                p = cloud_a[i, :]
                weights = np.ones(shape=len(ids), dtype=float)
                # Sorted unsigned distances from p to the four box edges
                hold_dists = list()
                hold_dists.append(box[2] - p[0])
                hold_dists.append(p[1] - box[1])
                hold_dists.append(p[0] - box[0])
                hold_dists.append(box[3] - p[1])
                hold_dists = np.asarray(hold_dists, dtype=float)
                hold_dists = np.sqrt(hold_dists * hold_dists)
                hold_dists = np.sort(hold_dists)
                d1, d2, d3, d4 = hold_dists[0], hold_dists[1], hold_dists[2], hold_dists[3]
                # Loop for neighbours
                for j, idx in enumerate(ids):
                    # Compute distance to neighbour
                    pn = cloud_b[idx, :]
                    hold_r = p - pn
                    rj = math.sqrt((hold_r * hold_r).sum())
                    # Edge compensation (shared Goreaud switch)
                    alpha = self.__goreaud_alpha(rj, d1, d2, d3, d4)
                    if alpha > pi_2:
                        weights[j] = 0.
                    else:
                        weights[j] = pi_2 / (pi_2 - alpha)
                # Updating K entry
                K[k] += (weights.sum())
        # Compute the H form
        return np.sqrt((area*K) / (np.pi*N*N)) - rd, rd
###########################################################################################
# Class for finding and analyzing filaments in cloud of points on a plane
# VERY IMPORTANT: only valid for 2D data
###########################################################################################
class NetFilCloud(object):
# cloud: cloud of points (2D array with n points)
# res: resolution in nm
# k: number of nearest neighbours (default 1) for building the graph through knn
# e_len: edge maximum length
# min_len: minimum length for the Filaments (default 0)
# max_len: maximum length for the Filaments (default MAX_FLOAT)
    def __init__(self, cloud, res, k=1, e_len=MAX_FLOAT, min_len=0, max_len=MAX_FLOAT):
        """Builds a kNN graph on the input 2D cloud and extracts its filaments.

        cloud: cloud of points (2D array with n points)
        res: resolution in nm
        k: number of nearest neighbours (default 1) for building the graph through knn
        e_len: edge maximum length
        min_len: minimum length for the Filaments (default 0)
        max_len: maximum length for the Filaments (default MAX_FLOAT)
        """
        self.__cloud = cloud
        self.__res = res
        self.__e_len = e_len
        # Graph construction must precede filament search, which traverses it
        self.__graph = self.__build_graph(cloud, k, e_len)
        self.__min_len = min_len
        self.__max_len = max_len
        self.__fils = list()
        self.__find_fils()
#### Get/Set methods
    def get_filaments(self):
        """Return the list of filaments found in the cloud (built by the internal
        filament search)."""
        return self.__fils
# Returns the number of different vertices which compound the filament network
def get_num_fil_vertices(self):
cont = 0
lut = np.ones(shape=self.__graph.num_vertices(), dtype=np.bool)
for fil in self.__fils:
for v in fil.get_vertices():
if lut[int(v)]:
cont += 1
return cont
# Returns the number of different edges which compound the filament network
def get_num_fil_edges(self):
cont = 0
n_verts = self.__graph.num_vertices()
lut = np.ones(shape=(n_verts, n_verts), dtype=np.bool)
for fil in self.__fils:
for e in fil.get_edges():
s, t = int(e.source()), (e.target())
if lut[s, t] and lut[t, s]:
cont += 1
lut[s, t], lut[t, s] = True, True
return cont
    def get_graph_vtp(self):
        """Return a vtkPolyData with the kNN graph: every cloud point as a vertex cell
        and every graph edge as a 2-point line cell, with cell id and edge length
        attached as cell data arrays."""
        # Initialization
        point_id = 0
        cell_id = 0
        points = vtk.vtkPoints()
        verts = vtk.vtkCellArray()
        lines = vtk.vtkCellArray()
        cell_data = vtk.vtkIntArray()
        cell_data.SetNumberOfComponents(1)
        cell_data.SetName(STR_CELL)
        len_data = vtk.vtkFloatArray()
        len_data.SetNumberOfComponents(1)
        len_data.SetName(STR_2GT_EL)
        # Write vertices (2D points embedded at z=0)
        for p in self.__cloud:
            verts.InsertNextCell(1)
            points.InsertPoint(point_id, p[0], p[1], 0)
            verts.InsertCellPoint(point_id)
            point_id += 1
            cell_id += 1
            cell_data.InsertNextTuple((cell_id,))
            # Vertices carry no length; tagged with the NO_CONNECTION sentinel
            len_data.InsertNextTuple((NO_CONNECTION,))
        # Write edges: each edge duplicates its endpoint coordinates as new points
        for e in self.__graph.edges():
            lines.InsertNextCell(2)
            s = self.__cloud[int(e.source())]
            t = self.__cloud[int(e.target())]
            points.InsertPoint(point_id, s[0], s[1], 0)
            lines.InsertCellPoint(point_id)
            point_id += 1
            points.InsertPoint(point_id, t[0], t[1], 0)
            lines.InsertCellPoint(point_id)
            point_id += 1
            cell_id += 1
            cell_data.InsertNextTuple((cell_id,))
            # Euclidean length of the edge
            length = s - t
            len_data.InsertNextTuple((math.sqrt((length*length).sum()),))
        # Poly building
        poly = vtk.vtkPolyData()
        poly.SetPoints(points)
        poly.SetVerts(verts)
        poly.SetLines(lines)
        poly.GetCellData().AddArray(cell_data)
        poly.GetCellData().AddArray(len_data)
        return poly
    def get_fils_vtp(self):
        """Return a vtkPolyData with the filament network: every cloud point as a vertex
        cell and every filament as a polyline cell, with per-cell arrays for id, length,
        total curvature, sinuosity, smoothness and maximum curvature."""
        # Initialization
        point_id = 0
        cell_id = 0
        points = vtk.vtkPoints()
        verts = vtk.vtkCellArray()
        lines = vtk.vtkCellArray()
        cell_data = vtk.vtkIntArray()
        cell_data.SetNumberOfComponents(1)
        cell_data.SetName(STR_CELL)
        len_data = vtk.vtkFloatArray()
        len_data.SetNumberOfComponents(1)
        len_data.SetName(STR_2FIL_LEN)
        ct_data = vtk.vtkFloatArray()
        ct_data.SetNumberOfComponents(1)
        ct_data.SetName(STR_2FIL_CT)
        sin_data = vtk.vtkFloatArray()
        sin_data.SetNumberOfComponents(1)
        sin_data.SetName(STR_2FIL_SIN)
        smo_data = vtk.vtkFloatArray()
        smo_data.SetNumberOfComponents(1)
        smo_data.SetName(STR_2FIL_SMO)
        mc_data = vtk.vtkFloatArray()
        mc_data.SetNumberOfComponents(1)
        mc_data.SetName(STR_2FIL_MC)
        # Write vertices (2D points embedded at z=0); vertex cells get -1 in every
        # filament metric array so they can be filtered out downstream
        for p in self.__cloud:
            verts.InsertNextCell(1)
            points.InsertPoint(point_id, p[0], p[1], 0)
            verts.InsertCellPoint(point_id)
            point_id += 1
            cell_id += 1
            cell_data.InsertNextTuple((cell_id,))
            len_data.InsertNextTuple((-1,))
            ct_data.InsertNextTuple((-1,))
            sin_data.InsertNextTuple((-1,))
            smo_data.InsertNextTuple((-1,))
            mc_data.InsertNextTuple((-1,))
        # Write lines: one polyline per filament, with its measured metrics
        for i, f in enumerate(self.__fils):
            # Getting children if demanded
            coords = f.get_coords()
            lines.InsertNextCell(coords.shape[0])
            for c in coords:
                points.InsertPoint(point_id, c[0], c[1], 0)
                lines.InsertCellPoint(point_id)
                point_id += 1
            cell_id += 1
            cell_data.InsertNextTuple((cell_id,))
            len_data.InsertNextTuple((f.get_length(),))
            ct_data.InsertNextTuple((f.get_total_curvature(),))
            sin_data.InsertNextTuple((f.get_sinuosity(),))
            smo_data.InsertNextTuple((f.get_smoothness(),))
            mc_data.InsertNextTuple((f.get_max_curvature(),))
        # Poly building
        poly = vtk.vtkPolyData()
        poly.SetPoints(points)
        poly.SetVerts(verts)
        poly.SetLines(lines)
        poly.GetCellData().AddArray(cell_data)
        poly.GetCellData().AddArray(len_data)
        poly.GetCellData().AddArray(ct_data)
        poly.GetCellData().AddArray(sin_data)
        poly.GetCellData().AddArray(smo_data)
        poly.GetCellData().AddArray(mc_data)
        return poly
# Generates a window where the network is rendered
# mode= If 1 (default) the graph is render, otherwise the filament network
    # Generates a window where the network is rendered
    # mode: if 1 (default) the graph is rendered, otherwise the filament network
    def render(self, mode=1):
        """Open an interactive VTK window rendering either the kNN graph (mode=1) or the
        filament network; vertices are drawn in red, lines in green. Blocks until the
        interactor is closed."""
        # create a rendering window and renderer
        ren = vtk.vtkRenderer()
        renWin = vtk.vtkRenderWindow()
        renWin.AddRenderer(ren)
        # create a renderwindowinteractor
        iren = vtk.vtkRenderWindowInteractor()
        iren.SetRenderWindow(renWin)
        # Get source polydata according to the requested mode
        if mode == 1:
            source = self.get_graph_vtp()
        else:
            source = self.get_fils_vtp()
        # actor for vertices
        verts = vtk.vtkPolyData()
        verts.SetPoints(source.GetPoints())
        verts.SetVerts(source.GetVerts())
        mapper_v = vtk.vtkPolyDataMapper()
        mapper_v.SetInputData(verts)
        actor_v = vtk.vtkActor()
        actor_v.SetMapper(mapper_v)
        actor_v.GetProperty().SetColor(1,0,0) # (R,G,B)
        # actor for lines
        lines = vtk.vtkPolyData()
        lines.SetPoints(source.GetPoints())
        lines.SetLines(source.GetLines())
        mapper_l = vtk.vtkPolyDataMapper()
        mapper_l.SetInputData(lines)
        actor_l = vtk.vtkActor()
        actor_l.SetMapper(mapper_l)
        actor_l.GetProperty().SetColor(0,1,0) # (R,G,B)
        # assign actors to the renderer
        ren.AddActor(actor_v)
        ren.AddActor(actor_l)
        # enable user interface interactor
        iren.Initialize()
        renWin.Render()
        iren.Start()
#### External functionality area
# th_*: threshold objects (logical and is applied to all thresholds)
def threshold_and_fils(self, th_len=None, th_ct=None, th_sin=None, th_smo=None, th_mc=None):
# LUT for marking filaments to delete
del_lut = np.ones(shape=len(self.__fils), dtype=np.bool)
# Loop for filaments
for i, fil in enumerate(self.__fils):
if th_len is not None:
if not th_len.test(fil.get_length()):
continue
if th_ct is not None:
if not th_ct.test(fil.get_total_curvature()):
continue
if th_sin is not None:
if not th_sin.test(fil.get_sinuosity()):
continue
if th_smo is not None:
if not th_smo.test(fil.get_smoothness()):
continue
if th_mc is not None:
if not th_mc.test(fil.get_max_curvature()):
continue
del_lut[i] = False
# Copy just filaments passed all tests
hold_fils = self.__fils
self.__fils = list()
for i, fil in enumerate(hold_fils):
if not del_lut[i]:
self.__fils.append(fil)
#### Internal functionality area
# Build a GraphGT by knn criterion
def __build_graph(self, cloud, k, e_len=MAX_FLOAT):
    """Build an undirected GraphGT linking every point to its k nearest neighbours.

    cloud: (N, 2) array of point coordinates (one row per point)
    k: number of nearest neighbours per point
    e_len: maximum allowed edge length; farther neighbours are skipped
    Returns: graph_tool Graph with per-edge euclidean length property (STR_2GT_EL)
    Raises: pexceptions.PySegInputError if the cloud has fewer than k+1 points
    """
    if cloud.shape[0] < (k+1):
        error_msg = 'The number of point of the cloud must be greater than k =' + str(k)
        raise pexceptions.PySegInputError(expr='__build_graph (NetFilCloud)', msg=error_msg)
    # Graph initialization
    graph = gt.Graph(directed=False)
    graph.add_vertex(cloud.shape[0])
    lengths = list()
    # Applying kNN criteria
    for i, p in enumerate(cloud):
        # Euclidean distance from this point to every cloud point
        hold = p - cloud
        dists = np.sqrt(np.sum(hold * hold, axis=1))
        # Index 0 of the sorted distances is the point itself, so skip it
        ids = np.argsort(dists)[1:k+1]
        # Add the edges (one lengths entry per edge, kept in sync)
        for idx in ids:
            dist = dists[idx]
            if dist > e_len:
                # dists are sorted ascending -> all remaining neighbours are farther
                break
            lengths.append(dist)
            graph.add_edge(i, idx)
    # Set edge length as the euclidean distance
    graph.edge_properties[STR_2GT_EL] = graph.new_edge_property('float')
    # FIX: np.float was removed in NumPy >= 1.24; the builtin float is equivalent
    graph.edge_properties[STR_2GT_EL].get_array()[:] = np.asarray(lengths, dtype=float)
    return graph
def __find_fils(self):
    """Find filaments seeded at every graph vertex and store them in self.__fils.

    A Dijkstra search (FilVisitor2 collects paths within [min_len, max_len]) is
    run from each vertex; paths are de-duplicated by their (head, tail) pair.
    """
    # Visiting procedure initialization
    n_vertices = self.__graph.num_vertices()
    # Pairwise "already processed" LUT for endpoint pairs
    # FIX: np.bool was removed in NumPy >= 1.24; the builtin bool is equivalent
    # NOTE(review): O(n^2) memory; may be large for big graphs — confirm scale
    connt = np.zeros(shape=(n_vertices, n_vertices), dtype=bool)
    prop_con = self.__graph.edge_properties[STR_2GT_EL]
    # Main loop for finding filaments at every vertex
    for source in self.__graph.vertices():
        # An isolated vertex cannot be a Filament
        if sum(1 for _ in source.all_edges()) <= 0:
            continue
        # Search filaments in source neighbourhood
        visitor = FilVisitor2(self.__graph, source, self.__min_len, self.__max_len)
        gt.dijkstra_search(self.__graph, source, prop_con, visitor)
        hold_v_paths, hold_e_paths = visitor.get_paths()
        # Build the filaments
        for i, v_path in enumerate(hold_v_paths):
            head, tail = v_path[0], v_path[-1]
            head_i, tail_i = int(head), int(tail)
            if not(connt[head_i, tail_i]) and not(connt[tail_i, head_i]):
                v_list = list()
                e_list = list()
                e_path = hold_e_paths[i]
                for j in range(len(v_path) - 1):
                    v_list.append(v_path[j])
                    e_list.append(e_path[j])
                v_list.append(v_path[-1])
                # Building a filament
                self.__fils.append(FilamentU(self.__graph, self.__cloud,
                                             v_list, e_list, self.__res))
                # Mark this endpoint pair as already processed (both directions)
                connt[head_i, tail_i] = True
                connt[tail_i, head_i] = True
###########################################################################################
# Class for modelling a filament (unoriented curve in a plane) (input graph is GraphGT)
###########################################################################################
class FilamentU(object):
    """Models a filament: an unoriented curve in a plane over a parent GraphGT.

    Constructor parameters (in signature order):
      graph: parent GraphGT (kept only as a reference)
      coords: array of vertex coordinates for the whole graph, indexed by vertex id
      vertices: ordered list of graph vertices along the filament (head to tail)
      edges: ordered list of edges, v{i} -> e{i} -> v{i+1}
      res: resolution in nm (scale factor applied to metric outputs)
    """

    def __init__(self, graph, coords, vertices, edges, res):
        self.__graph = graph
        self.__vertices = vertices
        # Per-filament (n_vertices, 2) coordinate array extracted from the cloud
        self.__coords = self.__get_path_coords(coords)
        self.__edges = edges
        self.__res = res

    #### Set/Get methods area

    def get_edges(self):
        """Return the ordered list of edges along the filament."""
        return self.__edges

    def get_vertices(self):
        """Return the ordered list of vertices along the filament."""
        return self.__vertices

    def get_num_vertices(self):
        """Return the number of vertices in the filament."""
        return len(self.__vertices)

    def get_head(self):
        """Return the first vertex of the filament."""
        return self.__vertices[0]

    def get_tail(self):
        """Return the last vertex of the filament."""
        return self.__vertices[-1]

    def get_coords(self):
        """Return filament path coordinates ordered from head to tail."""
        return self.__coords

    def get_length(self):
        """Return the curve length in nm (sum of consecutive segment lengths)."""
        length = 0.
        coords = self.get_coords()
        for i in range(coords.shape[0] - 1):
            x1, y1 = coords[i, 0], coords[i, 1]
            x2, y2 = coords[i+1, 0], coords[i+1, 1]
            # FIX: np.float was removed in NumPy >= 1.24 -> builtin float
            hold = np.asarray((x1-x2, y1-y2), dtype=float)
            length += math.sqrt(np.sum(hold*hold))
        return length * self.__res

    def get_head_tail_dist(self):
        """Return the straight-line head-to-tail distance in nm."""
        # FIX: np.float was removed in NumPy >= 1.24 -> builtin float
        hold = np.asarray((self.__coords[0][0]-self.__coords[-1][0],
                           self.__coords[0][1]-self.__coords[-1][1],), dtype=float)
        return math.sqrt(np.sum(hold*hold)) * self.__res

    def get_total_curvature(self):
        """Return the total (signed) curvature: trapezoid-like integral of the
        point-wise curvatures returned by compute_plane_k()."""
        # Getting curve coordinates in space
        curve = self.__coords * self.__res
        # Computing curvatures
        curvatures = compute_plane_k(curve)
        # Curvature integral
        total_k = 0.
        for i in range(1, curve.shape[0]-1):
            v_i_l1, v_i, v_i_p1 = curve[i-1, :], curve[i, :], curve[i+1, :]
            h_1 = v_i_p1 - v_i
            h_2 = v_i-v_i_l1
            h_1 = math.sqrt(h_1[0]*h_1[0] + h_1[1]*h_1[1])
            h_2 = math.sqrt(h_2[0]*h_2[0] + h_2[1]*h_2[1])
            total_k += (0.5 * (h_1 + h_2) * curvatures[i-1])
        return total_k

    def get_smoothness(self):
        """Return a smoothness metric: sum of squared integral terms built from
        the squared curvatures (squaring discards orientation information)."""
        # Getting curve coordinates in space
        curve = self.__coords * self.__res
        # Computing curvatures
        curvatures = compute_plane_k(curve)
        # Square for avoiding orientation information
        curvatures *= curvatures
        # Curvature integral
        total_k = 0.
        for i in range(1, curve.shape[0]-1):
            v_i_l1, v_i, v_i_p1 = curve[i-1, :], curve[i, :], curve[i+1, :]
            h_1 = v_i_p1 - v_i
            h_2 = v_i-v_i_l1
            h_1 = math.sqrt(h_1[0]*h_1[0] + h_1[1]*h_1[1])
            h_2 = math.sqrt(h_2[0]*h_2[0] + h_2[1]*h_2[1])
            hold = (0.5 * (h_1 + h_2) * curvatures[i-1])
            total_k += (hold * hold)
        return total_k

    def get_max_curvature(self):
        """Return the maximum absolute local curvature along the filament."""
        # Getting curve coordinates in space
        curve = self.__coords * self.__res
        # Computing curvatures
        curvatures = np.absolute(compute_plane_k(curve))
        # Maximum
        return curvatures.max()

    def get_sinuosity(self):
        """Return the sinuosity (length / head-tail distance); 0 for degenerate
        filaments (zero length or coincident extremes)."""
        length = self.get_length()
        if length == 0:
            return 0
        dst = self.get_head_tail_dist()
        if dst == 0:
            return 0
        else:
            return length / dst

    ##### Internal functionality area

    def __get_path_coords(self, cloud):
        """Gather the (n_v, 2) coordinates of this filament's vertices from the
        whole-graph coordinate array *cloud*."""
        n_v = len(self.__vertices)
        # FIX: np.float was removed in NumPy >= 1.24 -> builtin float
        coords = np.zeros(shape=(n_v, 2), dtype=float)
        for i in range(n_v):
            coords[i, :] = cloud[int(self.__vertices[i]), :]
        return coords
###########################################################################################
# Class for analyzing groups of clouds
###########################################################################################
class GroupClouds(object):
    """Spatial statistics over groups of 2D point clouds.

    Level 1 (analyze_1): averaged nearest-neighbour distances and G/F functions,
    optionally against random-case simulation envelopes.
    Level 2 (analyze_2): Ripley's H (and its derivative H') per cloud, weighted
    averages per group, and cross-correlation matrices between clouds.

    Constructor parameters:
      n_samp: number of samples for the distribution functions
      n_sim_f: number of simulations for generating F and G functions
      max_d: maximum distance for Ripley's metrics
      p_f: percentile for F and G simulations test; if None (default) the
           random-case envelopes are not computed
    """

    def __init__(self, n_samp, n_sim_f, max_d, p_f=None):
        self.__n = n_samp
        self.__nsim_f = n_sim_f
        self.__max_d = max_d
        self.__p_f = p_f
        self.__groups_cloud = list()
        self.__groups_boxes = list()
        self.__names = list()
        self.__groups_g = list()
        self.__groups_f = list()
        self.__groups_h = list()
        self.__groups_hp = list()
        self.__groups_wh = list()
        self.__groups_whp = list()
        self.__gs = None
        self.__gsx = None
        self.__gsl = None
        self.__gsh = None
        self.__fs = None
        self.__fsx = None
        self.__fsl = None
        self.__fsh = None
        self.__cc_lbls = list()
        self.__cch = None
        self.__cchp = None
        # Low pass filter for differentials (coefficients reused by analyze_2)
        b, a = butter(LP_ORDER, LP_NORM_CUTOFF, btype='low', analog=False)
        self.__lpf = (b, a)

    # Get/Set functionality

    # External functionality
    def insert_group(self, clouds, boxes, name):
        """Register a group: *clouds* list of clouds, *boxes* their bounding
        boxes (parallel list), *name* group label."""
        self.__groups_cloud.append(clouds)
        self.__groups_boxes.append(boxes)
        self.__names.append(name)

    def pickle(self, fname):
        """Pickle the object state to the file at full path *fname*."""
        # FIX: pickle writes bytes, so the file must be opened in binary mode
        # (text mode 'w' raises TypeError under Python 3)
        pkl_f = open(fname, 'wb')
        try:
            pickle.dump(self, pkl_f)
        finally:
            pkl_f.close()

    def analyze_1(self, verbose=False):
        """Run level-1 analysis: per-group G functions, F functions and (when
        p_f is set) random-case envelopes. verbose: print progress."""
        if verbose:
            sys.stdout.write('Progress analysis level 1: 0% ... ')
        for group in self.__groups_cloud:
            g, gx = self.__group_function_G(group, self.__n)
            self.__groups_g.append((g, gx))
        # Flatten the list of groups into parallel cloud/box lists
        p_groups = list()
        p_boxes = list()
        for (group, boxes) in zip(self.__groups_cloud, self.__groups_boxes):
            for (cloud, box) in zip(group, boxes):
                p_groups.append(cloud)
                p_boxes.append(box)
        if verbose:
            sys.stdout.write('25% ... ')
        if self.__p_f is not None:
            self.__gsx, self.__gsl, self.__gsm, self.__gsh = self.__rand_group_function_G(p_groups,
                                                                                         p_boxes,
                                                                                         self.__n,
                                                                                         self.__nsim_f,
                                                                                         self.__p_f)
        if verbose:
            sys.stdout.write('50% ... ')
        for (group, boxes) in zip(self.__groups_cloud, self.__groups_boxes):
            f, fx = self.__group_function_F(group, boxes, self.__n, self.__nsim_f)
            self.__groups_f.append((f, fx))
        if verbose:
            sys.stdout.write('75% ... ')
        if self.__p_f is not None:
            self.__fsx, self.__fsl, self.__fsm, self.__fsh = self.__rand_group_function_F(p_groups,
                                                                                         p_boxes,
                                                                                         self.__n,
                                                                                         self.__nsim_f,
                                                                                         self.__p_f)
        if verbose:
            print('100%')

    def analyze_2(self, verbose=False):
        """Run level-2 analysis: Ripley's H/H' per cloud (low-pass filtered),
        weighted group averages, and cross-correlation matrices."""
        if verbose:
            sys.stdout.write('Progress analysis level 2: 0% ... ')
        tot = 1
        for boxes in self.__groups_boxes:
            tot += len(boxes)
        # Compute Ripleys H for every cloud
        cont = 0
        for (clouds, boxes) in zip(self.__groups_cloud, self.__groups_boxes):
            group_h = list()
            group_hp = list()
            for (cloud, box) in zip(clouds, boxes):
                h, hx = ripley_goreaud(cloud, box, self.__n, self.__max_d)
                # Low-pass filter before differentiating to tame noise
                h_f = lfilter(self.__lpf[0], self.__lpf[1], h)
                group_h.append((h_f, hx))
                group_hp.append((np.gradient(h_f, hx[1] - hx[0]), hx))
                cont += 1
                # FIX: progress was written regardless of the verbose flag
                if verbose:
                    pct = 100. * (float(cont) / float(tot))
                    sys.stdout.write(str(round(pct, 1)) + '% ... ')
            self.__groups_h.append(group_h)
            self.__groups_hp.append(group_hp)
        # Compute Ripleys for every group (weighted by cloud size)
        for (h_pairs, clouds) in zip(self.__groups_h, self.__groups_cloud):
            # FIX: np.float was removed in NumPy >= 1.24 -> builtin float
            weights = np.zeros(shape=len(clouds), dtype=float)
            for i, cloud in enumerate(clouds):
                weights[i] = cloud.shape[0]
            weights /= weights.sum()
            # Compute averages
            ha = np.zeros(shape=h_pairs[0][0].shape, dtype=float)
            hpa = np.zeros(shape=h_pairs[0][0].shape, dtype=float)
            for i in range(len(h_pairs)):
                ha += (weights[i] * h_pairs[i][0])
                hpa += (weights[i] * np.gradient(h_pairs[i][0], h_pairs[i][1][1] - h_pairs[i][1][0]))
            ha_x = h_pairs[0][1]
            self.__groups_wh.append((ha, ha_x))
            self.__groups_whp.append((hpa, ha_x))
        # Computing cross-correlation coefficients between all clouds
        h_mat = np.zeros(shape=(tot-1, self.__n), dtype=float)
        hp_mat = np.zeros(shape=(tot-1, self.__n), dtype=float)
        cont = 0
        for i in range(len(self.__groups_h)):
            for j in range(len(self.__groups_h[i])):
                h_mat[cont, :] = self.__groups_h[i][j][0]
                hp_mat[cont, :] = self.__groups_hp[i][j][0]
                self.__cc_lbls.append(i)
                cont += 1
        self.__cch = np.corrcoef(h_mat)
        self.__cchp = np.corrcoef(hp_mat)
        if verbose:
            print('100%')

    # Plot into figures the current analysis level 1 state
    # block: if True (default False) waits for closing windows for finishing the execution
    def plot_1(self, block=False):
        """Plot level-1 results (ANND bars, G- and F-functions)."""
        if len(self.__groups_g) == 0:
            if len(self.__names) == 0:
                print('WARNING: no groups added, run insert_group() and analyze_1() first!')
            else:
                print('WARNING: run analyze_1() first!')
        # Initialization
        fig_count = 0
        if block:
            plt.ion()
        width = 0.35
        ind = np.arange(len(self.__names))
        color = cm.rainbow(np.linspace(0, 1, len(self.__names)))
        # Plot densities
        fig_count += 1
        ax = plt.figure(fig_count).add_subplot(111)
        plt.title('Averaged Nearest Neighbour Distance')
        plt.xlabel('Group')
        plt.ylabel('ANN (nm)')
        means = list()
        stds = list()
        for (group, lbl) in zip(self.__groups_g, self.__names):
            means.append(np.mean(group[0]))
            stds.append(np.std(group[0]))
        # FIX: np.float was removed in NumPy >= 1.24 -> builtin float
        bars1 = plt.bar(ind, np.asarray(means, dtype=float), width, color='b')
        bars2 = plt.bar(ind+width, np.asarray(stds, dtype=float), width, color='r')
        ax.set_xticks(ind + width)
        ax.set_xticklabels(self.__names)
        ax.legend((bars1[0], bars2[0]), ('Mean', 'Std'))
        # Plot G-Function
        fig_count += 1
        plt.figure(fig_count)
        plt.title('G-Function')
        plt.xlabel('Distance (nm)')
        plt.ylabel('G')
        plt.ylim(0, 1)
        if self.__p_f is not None:
            # Random-case median and percentile envelopes
            plt.plot(self.__gsx, self.__gsm, 'k')
            plt.plot(self.__gsx, self.__gsl, 'k--')
            plt.plot(self.__gsx, self.__gsh, 'k--')
        lines = list()
        for (group, lbl, c) in zip(self.__groups_g, self.__names, color):
            line, = plt.plot(group[1], group[0], c=c, label=lbl)
            lines.append(line)
        if len(lines) > 0:
            plt.legend(handles=lines)
        # Plot F-Function
        fig_count += 1
        plt.figure(fig_count)
        plt.title('F-Function')
        plt.xlabel('Distance (nm)')
        plt.ylabel('F')
        plt.ylim(0, 1)
        if self.__p_f is not None:
            plt.plot(self.__fsx, self.__fsm, 'k')
            plt.plot(self.__fsx, self.__fsl, 'k--')
            plt.plot(self.__fsx, self.__fsh, 'k--')
        lines = list()
        for (group, lbl, c) in zip(self.__groups_f, self.__names, color):
            line, = plt.plot(group[1], group[0], c=c, label=lbl)
            lines.append(line)
        if len(lines) > 0:
            plt.legend(handles=lines)
        # Show
        plt.show(block=block)

    # Store the level 1 figures
    # path: path to the folder where figures will be stored
    def store_figs_1(self, path):
        """Store level-1 figures (annd.png, g.png, f.png) under *path*."""
        if len(self.__groups_g) == 0:
            if len(self.__names) == 0:
                print('WARNING: no groups added, run insert_group() and analyze_1() first!')
            else:
                print('WARNING: run analyze_1() first!')
        # Initialization
        fig_count = 0
        width = 0.35
        ind = np.arange(len(self.__names))
        color = cm.rainbow(np.linspace(0, 1, len(self.__names)))
        # Plot densities
        fig_count += 1
        ax = plt.figure(fig_count).add_subplot(111)
        plt.title('Averaged Nearest Neighbour Distance')
        plt.xlabel('Group')
        plt.ylabel('ANN (nm)')
        means = list()
        stds = list()
        for (group, lbl) in zip(self.__groups_g, self.__names):
            means.append(np.mean(group[0]))
            stds.append(np.std(group[0]))
        # FIX: np.float was removed in NumPy >= 1.24 -> builtin float
        bars1 = plt.bar(ind, np.asarray(means, dtype=float), width, color='b')
        bars2 = plt.bar(ind+width, np.asarray(stds, dtype=float), width, color='r')
        ax.set_xticks(ind + width)
        ax.set_xticklabels(self.__names)
        ax.legend((bars1[0], bars2[0]), ('Mean', 'Std'))
        plt.savefig(path + '/annd.png')
        plt.close()
        # Plot G-Function
        fig_count += 1
        plt.figure(fig_count)
        plt.title('G-Function')
        plt.xlabel('Distance (nm)')
        plt.ylabel('G')
        plt.ylim(0, 1)
        if self.__p_f is not None:
            plt.plot(self.__gsx, self.__gsm, 'k')
            plt.plot(self.__gsx, self.__gsl, 'k--')
            plt.plot(self.__gsx, self.__gsh, 'k--')
        lines = list()
        for (group, lbl, c) in zip(self.__groups_g, self.__names, color):
            line, = plt.plot(group[1], group[0], c=c, label=lbl)
            lines.append(line)
        if len(lines) > 0:
            plt.legend(handles=lines)
        plt.savefig(path + '/g.png')
        plt.close()
        # Plot F-Function
        fig_count += 1
        plt.figure(fig_count)
        plt.title('F-Function')
        plt.xlabel('Distance (nm)')
        plt.ylabel('F')
        plt.ylim(0, 1)
        if self.__p_f is not None:
            plt.plot(self.__fsx, self.__fsm, 'k')
            plt.plot(self.__fsx, self.__fsl, 'k--')
            plt.plot(self.__fsx, self.__fsh, 'k--')
        lines = list()
        for (group, lbl, c) in zip(self.__groups_f, self.__names, color):
            line, = plt.plot(group[1], group[0], c=c, label=lbl)
            lines.append(line)
        if len(lines) > 0:
            plt.legend(handles=lines)
        plt.savefig(path + '/f.png')
        plt.close()

    # Plot into figures the current analysis level 2 state
    # block: if True (default False) waits for closing windows for finishing the execution
    def plot_2(self, block=False):
        """Plot level-2 results (Ripley's H/H' per group, weighted averages,
        cross-correlation matrices)."""
        if len(self.__groups_h) == 0:
            if len(self.__names) == 0:
                print('WARNING: no groups added, run insert_group() and analyze_2() first!')
            else:
                print('WARNING: run analyze_2() first!')
        # Initialization
        fig_count = 0
        if block:
            plt.ion()
        # Plot individual Ripley's H
        for (h_pairs, name) in zip(self.__groups_h, self.__names):
            fig_count += 1
            plt.figure(fig_count)
            plt.title('Ripley\'s H for group ' + str(name))
            plt.xlabel('Radius (nm)')
            plt.ylabel('H')
            cont = 1
            lines = list()
            color = cm.rainbow(np.linspace(0, 1, len(h_pairs)))
            for (h_pair, c) in zip(h_pairs, color):
                line, = plt.plot(h_pair[1], h_pair[0], c=c, label=str(cont))
                lines.append(line)
                cont += 1
            plt.legend(handles=lines)
        # Plot individual Ripley's H'
        for (hp_pairs, name) in zip(self.__groups_hp, self.__names):
            fig_count += 1
            plt.figure(fig_count)
            plt.title('Ripley\'s H\' for group ' + str(name))
            plt.xlabel('Radius (nm)')
            plt.ylabel('H\'')
            cont = 1
            lines = list()
            # FIX: colors were sized by the stale h_pairs from the previous loop
            color = cm.rainbow(np.linspace(0, 1, len(hp_pairs)))
            for (hp_pair, c) in zip(hp_pairs, color):
                line, = plt.plot(hp_pair[1], hp_pair[0], c=c, label=str(cont))
                lines.append(line)
                cont += 1
            plt.legend(handles=lines)
        # Plot weighted Ripleys'H
        fig_count += 1
        plt.figure(fig_count)
        plt.title('Weighted Ripley\'s H')
        plt.xlabel('Radius (nm)')
        plt.ylabel('H')
        lines = list()
        color = cm.rainbow(np.linspace(0, 1, len(self.__groups_wh)))
        for (wh_pair, name, c) in zip(self.__groups_wh, self.__names, color):
            line, = plt.plot(wh_pair[1], wh_pair[0], c=c, label=name)
            lines.append(line)
        plt.legend(handles=lines)
        # Plot weighted Ripleys'H'
        fig_count += 1
        plt.figure(fig_count)
        plt.title('Weighted Ripley\'s H\'')
        plt.xlabel('Radius (nm)')
        plt.ylabel('H\'')
        lines = list()
        for (whp_pair, name, c) in zip(self.__groups_whp, self.__names, color):
            line, = plt.plot(whp_pair[1], whp_pair[0], c=c, label=name)
            lines.append(line)
        plt.legend(handles=lines)
        # Plot cross-correlation for Ripley's H
        fig_count += 1
        plt.figure(fig_count)
        plt.title('Cross-correlation matrix for Ripley\'s H')
        plt.xlim(0, self.__cch.shape[0])
        plt.ylim(0, self.__cch.shape[1])
        plt.pcolor(self.__cch, cmap='jet', vmin=-1, vmax=1)
        plt.colorbar()
        plt.xticks(np.arange(.5, self.__cch.shape[0]+.5), self.__cc_lbls)
        plt.yticks(np.arange(.5, self.__cch.shape[1]+.5), self.__cc_lbls)
        # Plot cross-correlation for Ripley's H'
        fig_count += 1
        plt.figure(fig_count)
        plt.title('Cross-correlation matrix for Ripley\'s H\'')
        plt.xlim(0, self.__cchp.shape[0])
        plt.ylim(0, self.__cchp.shape[1])
        plt.pcolor(self.__cchp, cmap='jet', vmin=-1, vmax=1)
        plt.colorbar()
        plt.xticks(np.arange(.5, self.__cchp.shape[0]+.5), self.__cc_lbls)
        plt.yticks(np.arange(.5, self.__cchp.shape[1]+.5), self.__cc_lbls)
        # Show
        plt.show(block=block)

    # Store figures the current analysis level 2 state
    # path: path to the folder where figures will be stored
    # plt_cl: if True (default) clouds coordinates are stored
    def store_figs_2(self, path, plt_cl=True):
        """Store level-2 figures under *path*; with plt_cl, also one scatter
        plot per cloud in a per-group sub-folder (recreated from scratch)."""
        if len(self.__groups_h) == 0:
            if len(self.__names) == 0:
                # FIX: the message referred to analyze_1(), but this method needs analyze_2()
                print('WARNING: no groups added, run insert_group() and analyze_2() first!')
            else:
                print('WARNING: run analyze_2() first!')
        # Initialization
        fig_count = 0
        # Plot individual Ripley's H
        for (h_pairs, name) in zip(self.__groups_h, self.__names):
            fig_count += 1
            plt.figure(fig_count)
            plt.title('Ripley\'s H for group ' + name)
            plt.xlabel('Radius (nm)')
            plt.ylabel('H')
            cont = 1
            lines = list()
            color = cm.rainbow(np.linspace(0, 1, len(h_pairs)))
            for (h_pair, c) in zip(h_pairs, color):
                line, = plt.plot(h_pair[1], h_pair[0], c=c, label=str(cont))
                lines.append(line)
                cont += 1
            plt.legend(handles=lines)
            plt.savefig(path + '/' + name + '_h.png')
            plt.close()
        # Plot individual Ripley's H'
        for (hp_pairs, name) in zip(self.__groups_hp, self.__names):
            fig_count += 1
            plt.figure(fig_count)
            plt.title('Ripley\'s H\' for group ' + str(name))
            plt.xlabel('Radius (nm)')
            plt.ylabel('H\'')
            cont = 1
            lines = list()
            # FIX: colors were sized by the stale h_pairs from the previous loop
            color = cm.rainbow(np.linspace(0, 1, len(hp_pairs)))
            for (hp_pair, c) in zip(hp_pairs, color):
                line, = plt.plot(hp_pair[1], hp_pair[0], c=c, label=str(cont))
                lines.append(line)
                cont += 1
            plt.legend(handles=lines)
            plt.savefig(path + '/' + name + '_hp.png')
            plt.close()
        # Plot weighted Ripleys'H
        fig_count += 1
        plt.figure(fig_count)
        plt.title('Weighted Ripley\'s H')
        plt.xlabel('Radius (nm)')
        plt.ylabel('H')
        lines = list()
        color = cm.rainbow(np.linspace(0, 1, len(self.__groups_wh)))
        for (wh_pair, name, c) in zip(self.__groups_wh, self.__names, color):
            line, = plt.plot(wh_pair[1], wh_pair[0], c=c, label=name)
            lines.append(line)
        plt.legend(handles=lines)
        plt.savefig(path + '/wh.png')
        plt.close()
        # Plot weighted Ripleys'H'
        fig_count += 1
        plt.figure(fig_count)
        plt.title('Weighted Ripley\'s H\'')
        plt.xlabel('Radius (nm)')
        plt.ylabel('H\'')
        lines = list()
        for (whp_pair, name, c) in zip(self.__groups_whp, self.__names, color):
            line, = plt.plot(whp_pair[1], whp_pair[0], c=c, label=name)
            lines.append(line)
        plt.legend(handles=lines)
        plt.savefig(path + '/whp.png')
        plt.close()
        # Plot cross-correlation for Ripley's H
        fig_count += 1
        plt.figure(fig_count)
        plt.title('Cross-correlation matrix for Ripley\'s H')
        plt.xlim(0, self.__cch.shape[0])
        plt.ylim(0, self.__cch.shape[1])
        plt.pcolor(self.__cch, cmap='jet', vmin=-1, vmax=1)
        plt.colorbar()
        plt.xticks(np.arange(.5, self.__cch.shape[0]+.5), self.__cc_lbls)
        plt.yticks(np.arange(.5, self.__cch.shape[1]+.5), self.__cc_lbls)
        plt.savefig(path + '/cch.png')
        plt.close()
        # Plot cross-correlation for Ripley's H'
        fig_count += 1
        plt.figure(fig_count)
        plt.title('Cross-correlation matrix for Ripley\'s H\'')
        plt.xlim(0, self.__cchp.shape[0])
        plt.ylim(0, self.__cchp.shape[1])
        plt.pcolor(self.__cchp, cmap='jet', vmin=-1, vmax=1)
        plt.colorbar()
        plt.xticks(np.arange(.5, self.__cchp.shape[0]+.5), self.__cc_lbls)
        plt.yticks(np.arange(.5, self.__cchp.shape[1]+.5), self.__cc_lbls)
        plt.savefig(path + '/cchp.png')
        plt.close()
        # Plot clouds
        if plt_cl:
            for (clouds, boxes, name) in zip(self.__groups_cloud, self.__groups_boxes, self.__names):
                figs_dir = path + '/' + name + '_clouds'
                # Recreate the per-group output folder from scratch
                if os.path.isdir(figs_dir):
                    shutil.rmtree(figs_dir)
                os.makedirs(figs_dir)
                cont = 1
                for (cloud, box) in zip(clouds, boxes):
                    fig_count += 1
                    plt.figure(fig_count)
                    plt.title('Clouds of points group ' + name + ' entry ' + str(cont))
                    plt.xlabel('X (nm)')
                    plt.ylabel('Y (nm)')
                    plt.axis('scaled')
                    plt.xlim(box[0], box[2])
                    plt.ylim(box[1], box[3])
                    plt.scatter(cloud[:, 0], cloud[:, 1])
                    plt.savefig(figs_dir + '/' + str(cont) + '.png')
                    plt.close()
                    cont += 1

    ##### Internal functionality area

    # Computes G function from a list of clouds
    # group: list of clouds
    # n: number of samples for cdf
    # Returns: averaged function g and samples
    def __group_function_G(self, group, n):
        """G function: CDF of pooled nearest-neighbour distances over *group*."""
        # Computing Nearest Neighbour distances
        dists = list()
        for cloud in group:
            dists += nnde(cloud).tolist()
        # Computing Cumulative Probability Distribution
        # FIX: np.float was removed in NumPy >= 1.24 -> builtin float
        return compute_cdf(np.asarray(dists, dtype=float), n)

    # Computes F function from a list of clouds
    # group: list of clouds
    # boxes: list of boxes
    # n: number of samples for cdf
    # m: number of random simulations
    # Returns: averaged function F and samples
    def __group_function_F(self, group, boxes, n, m):
        """F function: CDF of distances from random points to cloud points,
        pooled over m simulations cycling through the clouds."""
        l_group = len(group)
        if m < l_group:
            error_msg = 'The number of simulations (' + str(m) + ') must be equal or greather than the length ' \
                        'of clouds (' + str(l_group) + ')'
            raise pexceptions.PySegInputError(expr='__group_function_F (GroupClouds)', msg=error_msg)
        # Computing Nearest Neighbour distances
        dists = list()
        for i in range(m):
            c_id = i % l_group
            dists += cnnde(gen_rand_cloud(group[c_id].shape[0], boxes[c_id]), group[c_id]).tolist()
        # Computing Cumulative Probability Distribution
        dists = np.asarray(dists, dtype=float)
        return compute_cdf(dists, n)

    # Simulates G-Function for the random case with a number of simulations
    # group: list of clouds for reference
    # boxes: list of boxes
    # n: number of samples
    # m: number of random simulations
    # p: percentile for envelopes
    # Returns: samples, >p envelope, median, <100-p envelope
    def __rand_group_function_G(self, group, boxes, n, m, p):
        """Random-case G envelopes from m simulated clouds."""
        l_group = len(group)
        if m < l_group:
            error_msg = 'The number of simulations (' + str(m) + ') must be equal or greather than the length ' \
                        'of clouds (' + str(l_group) + ')'
            raise pexceptions.PySegInputError(expr='_rand_group_function_G (GroupClouds)', msg=error_msg)
        # Generate random points
        cont = 0
        cdfs = np.zeros(shape=(n, m), dtype=float)
        # Random simulation
        for i in range(m):
            c_id = i % l_group
            rand_dists = nnde(gen_rand_cloud(group[c_id].shape[0], boxes[c_id]))
            cdfs[:, cont], sp = compute_cdf(rand_dists, n)
            cont += 1
        # Compute envelopes
        env_1 = func_envelope(cdfs, per=p)
        env_2 = func_envelope(cdfs, per=50)
        env_3 = func_envelope(cdfs, per=100-p)
        return sp, env_1, env_2, env_3

    # Simulates F-Function for the random case with a number of simulations
    # group: list of clouds for reference
    # boxes: list of boxes
    # n: number of samples
    # m: number of random simulations
    # p: percentile for envelopes
    # Returns: samples, >p envelope, median, <100-p envelope
    def __rand_group_function_F(self, group, boxes, n, m, p):
        """Random-case F envelopes from m pairs of simulated clouds."""
        l_group = len(group)
        if m < l_group:
            error_msg = 'The number of simulations (' + str(m) + ') must be equal or greather than the length ' \
                        'of clouds (' + str(l_group) + ')'
            raise pexceptions.PySegInputError(expr='_rand_group_function_G (GroupClouds)', msg=error_msg)
        # Generate random points
        cont = 0
        cdfs = np.zeros(shape=(n, m), dtype=float)
        # Random simulation
        for i in range(m):
            c_id = i % l_group
            rand_dists = cnnde(gen_rand_cloud(group[c_id].shape[0], boxes[c_id]),
                               gen_rand_cloud(group[c_id].shape[0], boxes[c_id]))
            cdfs[:, cont], sp = compute_cdf(rand_dists, n)
            cont += 1
        # Compute envelopes
        env_1 = func_envelope(cdfs, per=p)
        env_2 = func_envelope(cdfs, per=50)
        env_3 = func_envelope(cdfs, per=100-p)
        return sp, env_1, env_2, env_3
###########################################################################################
# Class for plotting overlapped clouds from different membrane slices
###########################################################################################
class GroupPlotter(object):
    """Plots overlapped clouds from different membrane slices under one common
    bounding box, with one legend figure and one legend-free figure."""

    def __init__(self, name):
        self.__name = name
        self.__clouds = list()
        self.__names = list()
        self.__markers = list()
        # Common bounding box [x_min, y_min, x_max, y_max], grown as clouds arrive
        self.__box = None

    # Get/Set functionality

    # External functionality
    # cloud: input cloud
    # box: input box as [x_min, y_min, x_max, y_max]
    # name: name for the cloud
    # marker: if None (default) marker 'o' (circles) is used
    def insert_cloud(self, cloud, box, name, marker=None):
        """Add a cloud and enlarge the common bounding box to contain *box*."""
        if self.__box is None:
            self.__box = box
        else:
            if box[0] < self.__box[0]:
                self.__box[0] = box[0]
            if box[1] < self.__box[1]:
                self.__box[1] = box[1]
            # FIX: x_max was compared against self.__box[0] (x_min), which could
            # wrongly shrink the box; compare against self.__box[2] (x_max)
            if box[2] > self.__box[2]:
                self.__box[2] = box[2]
            if box[3] > self.__box[3]:
                self.__box[3] = box[3]
        # Insert cloud
        self.__clouds.append(cloud)
        self.__names.append(name)
        if marker is None:
            self.__markers.append('o')
        else:
            self.__markers.append(marker)

    def pickle(self, fname):
        """Pickle the object state to the file at full path *fname*."""
        # FIX: pickle writes bytes, so the file must be opened in binary mode
        # (text mode 'w' raises TypeError under Python 3)
        pkl_f = open(fname, 'wb')
        try:
            pickle.dump(self, pkl_f)
        finally:
            pkl_f.close()

    # Plot figures
    # block: if True (default False) waits for closing windows for finishing the execution
    def plot(self, block=False):
        """Show two figures with the overlapped clouds (with and without legend)."""
        if len(self.__names) == 0:
            print('WARNING: no groups added, call insert_group() first!')
        # Initialization
        fig_count = 0
        if block:
            plt.ion()
        color = cm.rainbow(np.linspace(0, 1, len(self.__names)))
        # Plot with legend
        fig_count += 1
        plt.figure(fig_count)
        plt.title('Overlapped clouds for ' + self.__name +' (legend)')
        plt.xlabel('X (nm)')
        plt.ylabel('Y (nm)')
        plt.xlim(self.__box[0], self.__box[2])
        plt.ylim(self.__box[1], self.__box[3])
        lines = list()
        for (cloud, mark, c) in zip(self.__clouds, self.__markers, color):
            line = plt.scatter(cloud[:, 0], cloud[:, 1], c=c, marker=mark)
            lines.append(line)
        if len(lines) > 0:
            plt.legend(lines, self.__names)
        # Plot without legend
        fig_count += 1
        plt.figure(fig_count)
        plt.title('Overlapped clouds for ' + self.__name)
        plt.xlabel('X (nm)')
        plt.ylabel('Y (nm)')
        plt.xlim(self.__box[0], self.__box[2])
        plt.ylim(self.__box[1], self.__box[3])
        for (cloud, mark, c) in zip(self.__clouds, self.__markers, color):
            plt.scatter(cloud[:, 0], cloud[:, 1], c=c, marker=mark)
        # Show
        plt.show(block=block)

    # Stores figures
    # path: path to the folder where figures will be stored
    def store_figs(self, path):
        """Store the two overlap figures as <name>_ov_lg.png and <name>_ov.png."""
        if len(self.__names) == 0:
            print('WARNING: no groups added, call insert_group() first!')
        # Initialization
        fig_count = 0
        color = cm.rainbow(np.linspace(0, 1, len(self.__names)))
        # Plot with legend
        fig_count += 1
        plt.figure(fig_count)
        plt.title('Overlapped clouds for ' + self.__name +' (legend)')
        plt.xlabel('X (nm)')
        plt.ylabel('Y (nm)')
        plt.xlim(self.__box[0], self.__box[2])
        plt.ylim(self.__box[1], self.__box[3])
        lines = list()
        for (cloud, mark, c) in zip(self.__clouds, self.__markers, color):
            line = plt.scatter(cloud[:, 0], cloud[:, 1], c=c, marker=mark)
            lines.append(line)
        if len(lines) > 0:
            plt.legend(lines, self.__names)
        plt.savefig(path + '/' + self.__name + '_ov_lg.png')
        plt.close()
        # Plot without legend
        fig_count += 1
        plt.figure(fig_count)
        plt.title('Overlapped clouds for ' + self.__name)
        plt.xlabel('X (nm)')
        plt.ylabel('Y (nm)')
        plt.xlim(self.__box[0], self.__box[2])
        plt.ylim(self.__box[1], self.__box[3])
        for (cloud, mark, c) in zip(self.__clouds, self.__markers, color):
            plt.scatter(cloud[:, 0], cloud[:, 1], c=c, marker=mark)
        plt.savefig(path + '/' + self.__name + '_ov.png')
        plt.close()
| 38.569435
| 115
| 0.529043
| 23,117
| 171,364
| 3.720119
| 0.040403
| 0.01414
| 0.016186
| 0.009489
| 0.836438
| 0.811903
| 0.797833
| 0.783751
| 0.769553
| 0.753052
| 0
| 0.022708
| 0.337749
| 171,364
| 4,443
| 116
| 38.569435
| 0.735077
| 0.16528
| 0
| 0.771607
| 0
| 0.053237
| 0.04002
| 0.000472
| 0
| 0
| 0
| 0
| 0
| 1
| 0.048636
| false
| 0
| 0.004272
| 0.008216
| 0.089057
| 0.005587
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
29695c041463522a58f95aa20c231dfe7d63b393
| 6,827
|
py
|
Python
|
tests/test_compare_deep.py
|
anexia-it/python-deepcompare
|
6b0c9a82aadc678e1bc3d17b91083495bd7def7a
|
[
"MIT"
] | null | null | null |
tests/test_compare_deep.py
|
anexia-it/python-deepcompare
|
6b0c9a82aadc678e1bc3d17b91083495bd7def7a
|
[
"MIT"
] | null | null | null |
tests/test_compare_deep.py
|
anexia-it/python-deepcompare
|
6b0c9a82aadc678e1bc3d17b91083495bd7def7a
|
[
"MIT"
] | null | null | null |
import deepcompare
def test_compare_list_in_list_with_list_in_list():
    """Nested lists vs nested lists: full, partial and non equality cases."""
    equal_cases = [
        ([[1, 2, ], 2, 3, ], [[1, 2, ], 2, 3, ]),
        ([[1, 2, ], [2, 3, ], 3, ], [[1, 2, ], [2, 3, ], 3, ]),
        ([[1, 2, ], [2, 3, ], [3, 4, ], ], [[1, 2, ], [2, 3, ], [3, 4, ], ]),
    ]
    unequal_cases = [
        # partial equality
        ([[1, 2, ], [2, 3, ], [3, 4, ], ], [[1, ], [2, 3, ], [3, 4, ], ]),
        ([[1, 2, ], [2, 3, ], [3, 4, ], ], [[1, 2, ], [2, ], [3, 4, ], ]),
        ([[1, 2, ], [2, 3, ], [3, 4, ], ], [[1, 2, ], [2, 3, ], [3, ], ]),
        ([[1, 2, ], [2, 3, ], [3, 4, ], ], [[1, 2, ], [2, 3, ], ]),
        # non-equality
        ([[1, 2, ], [2, 3, ], 3, ], [[1, 2, ], 2, 3, ]),
        ([[1, 2, ], [2, 3, ], [3, 4, ], ], [[1, 2, ], 2, 3, ]),
    ]
    for lhs, rhs in equal_cases:
        assert deepcompare.compare(lhs, rhs)
    for lhs, rhs in unequal_cases:
        assert not deepcompare.compare(lhs, rhs)
def test_compare_list_in_list_in_list_with_list_in_list_in_list():
    """Doubly nested lists vs doubly nested lists."""
    equal_cases = [
        ([[[1, 2, ], 2, ], 2, 3, ], [[[1, 2, ], 2, ], 2, 3, ]),
        ([[[1, 2, ], 2, ], [[2, 3, ], 3, ], 3, ], [[[1, 2, ], 2, ], [[2, 3, ], 3, ], 3, ]),
    ]
    unequal_cases = [
        # partial equality
        ([[[1, 2, ], 2, ], [[2, 3, ], 3, ], 3, ], [[[1, ], 2, ], [[2, 3, ], 3, ], 3, ]),
        ([[[1, 2, ], 2, ], [[2, 3, ], 3, ], 3, ], [[[1, 2, ], 2, ], [[2, ], 3, ], 3, ]),
        ([[[1, 2, ], 2, ], [[2, 3, ], 3, ], 3, ], [[[1, 2, ], 2, ], [[2, 3, ], ], 3, ]),
        # non-equality
        ([[[1, 2, ], 2, ], 2, 3, ], [[1, 2, ], 2, 3, ]),
        ([[[1, 2, ], 2, ], [[2, 3], 3, ], 3, ], [[[1, 2, ], 2, ], [2, 3, ], 3, ]),
    ]
    for lhs, rhs in equal_cases:
        assert deepcompare.compare(lhs, rhs)
    for lhs, rhs in unequal_cases:
        assert not deepcompare.compare(lhs, rhs)
def test_compare_tuple_in_list_with_tuple_in_list():
    """Tuples nested in lists vs tuples (and equal lists) nested in lists."""
    equal_cases = [
        ([(1, 2, ), 2, 3, ], [(1, 2, ), 2, 3, ]),
        ([(1, 2, ), (2, 3, ), 3, ], [(1, 2, ), [2, 3, ], 3, ]),
        ([(1, 2, ), (2, 3, ), (3, 4, ), ], [(1, 2, ), (2, 3, ), (3, 4, ), ]),
    ]
    unequal_cases = [
        # partial equality
        ([(1, 2, ), (2, 3, ), (3, 4, ), ], [(1, ), (2, 3, ), (3, 4, ), ]),
        ([(1, 2, ), (2, 3, ), (3, 4, ), ], [(1, 2, ), (2, ), (3, 4, ), ]),
        ([(1, 2, ), (2, 3, ), (3, 4, ), ], [(1, 2, ), (2, 3, ), (3, ), ]),
        ([(1, 2, ), (2, 3, ), (3, 4, ), ], [(1, 2, ), (2, 3, ), ]),
        # non-equality
        ([(1, 2, ), (2, 3, ), 3, ], [(1, 2, ), 2, 3, ]),
        ([(1, 2, ), (2, 3, ), (3, 4, ), ], [(1, 2, ), 2, 3, ]),
    ]
    for lhs, rhs in equal_cases:
        assert deepcompare.compare(lhs, rhs)
    for lhs, rhs in unequal_cases:
        assert not deepcompare.compare(lhs, rhs)
def test_compare_tuple_in_list_in_list_with_tuple_in_list_in_list():
    """Tuples nested two levels deep inside lists."""
    equal_cases = [
        ([[(1, 2, ), 2, ], 2, 3, ], [[(1, 2, ), 2, ], 2, 3, ]),
        ([[(1, 2, ), 2, ], [(2, 3), 3, ], 3, ], [[(1, 2, ), 2, ], [(2, 3, ), 3, ], 3, ]),
    ]
    unequal_cases = [
        # partial equality
        ([[(1, 2, ), 2, ], [(2, 3), 3, ], 3, ], [[(1, ), 2, ], [(2, 3, ), 3, ], 3, ]),
        ([[(1, 2, ), 2, ], [(2, 3), 3, ], 3, ], [[(1, 2, ), 2, ], [(2, ), 3, ], 3, ]),
        ([[(1, 2, ), 2, ], [(2, 3), 3, ], 3, ], [[(1, 2, ), 2, ], [(2, 3, ), 3, ], ]),
        # non-equality
        ([[(1, 2, ), 2, ], 2, 3, ], [(1, 2, ), 2, 3, ]),
        ([[(1, 2, ), 2, ], [(2, 3), 3, ], 3, ], [[(1, 2, ), 2, ], [2, 3, ], 3, ]),
    ]
    for lhs, rhs in equal_cases:
        assert deepcompare.compare(lhs, rhs)
    for lhs, rhs in unequal_cases:
        assert not deepcompare.compare(lhs, rhs)
def test_compare_dict_in_list_with_dict_in_list():
    """Dicts nested one level deep inside lists."""
    full = [{'a': 1, 'b': 2}, {'c': 3, 'd': 4}, {'e': 5, 'f': 6}]
    # Full equality.
    assert deepcompare.compare([{'a': 1, 'b': 2}, 2, 3],
                               [{'a': 1, 'b': 2}, 2, 3])
    assert deepcompare.compare([{'a': 1, 'b': 2}, {'c': 3, 'd': 4}, 3],
                               [{'a': 1, 'b': 2}, {'c': 3, 'd': 4}, 3])
    assert deepcompare.compare(
        full, [{'a': 1, 'b': 2}, {'c': 3, 'd': 4}, {'e': 5, 'f': 6}])
    # Partial equality: one nested dict (or the outer list) missing entries.
    for rhs in (
        [{'a': 1}, {'c': 3, 'd': 4}, {'e': 5, 'f': 6}],
        [{'a': 1, 'b': 2}, {'c': 3}, {'e': 5, 'f': 6}],
        [{'a': 1, 'b': 2}, {'c': 3, 'd': 4}, {'e': 5}],
        [{'a': 1, 'b': 2}, {'c': 3, 'd': 4}],
    ):
        assert not deepcompare.compare(full, rhs)
    # Non-equality: dicts compared against plain scalars.
    assert not deepcompare.compare([{'a': 1, 'b': 2}, {'c': 3, 'd': 4}, 3],
                                   [{'a': 1, 'b': 2}, 2, 3])
    assert not deepcompare.compare(full, [{'a': 1, 'b': 2}, {'c': 3, 'd': 4}, 3])
def test_compare_dict_in_list_in_list_with_dict_in_list_in_list():
    """Dicts nested two levels deep inside lists."""
    nested = [[{'a': 1, 'b': 2}, 2], [{'c': 3, 'd': 4}, 3], 3]
    # Full equality.
    assert deepcompare.compare([[{'a': 1, 'b': 2}, 2], 2, 3],
                               [[{'a': 1, 'b': 2}, 2], 2, 3])
    assert deepcompare.compare(
        nested, [[{'a': 1, 'b': 2}, 2], [{'c': 3, 'd': 4}, 3], 3])
    # Partial equality: an inner dict truncated, or the outer list short.
    for rhs in (
        [[{'a': 1}, 2], [{'c': 3, 'd': 4}, 3], 3],
        [[{'a': 1, 'b': 2}, 2], [{'c': 3}, 3], 3],
        [[{'a': 1, 'b': 2}, 2], [{'c': 3, 'd': 4}, 3]],
    ):
        assert not deepcompare.compare(nested, rhs)
    # Non-equality: one nesting level removed, or a dict replaced by a list.
    assert not deepcompare.compare([[{'a': 1, 'b': 2}, 2], 2, 3],
                                   [{'a': 1, 'b': 2}, 2, 3])
    assert not deepcompare.compare(nested, [[{'a': 1, 'b': 2}, 2], [2, 3], 3])
def test_compare_list_in_dict():
    """Lists stored as dict values."""
    # Full equality.
    assert deepcompare.compare({'a': [1, 2], 'b': 3}, {'a': [1, 2], 'b': 3})
    assert deepcompare.compare({'a': [1, 2], 'b': [3, 4]},
                               {'a': [1, 2], 'b': [3, 4]})
    # Partial equality: a missing key, then a truncated nested list.
    assert not deepcompare.compare({'a': [1, 2], 'b': 3}, {'a': [1, 2]})
    assert not deepcompare.compare({'a': [1, 2], 'b': [3, 4]},
                                   {'a': [1, 2], 'b': [3]})
    # Non-equality: list vs scalar, then a longer nested list.
    assert not deepcompare.compare({'a': [1, 2], 'b': [3, 4]},
                                   {'a': [1, 2], 'b': 3})
    assert not deepcompare.compare({'a': [1, 2], 'b': [3, 4]},
                                   {'a': [1, 2], 'b': [3, 4, 5]})
| 60.955357
| 158
| 0.418046
| 1,066
| 6,827
| 2.617261
| 0.026266
| 0.075986
| 0.075269
| 0.061649
| 0.992115
| 0.992115
| 0.965591
| 0.932258
| 0.914695
| 0.908961
| 0
| 0.110279
| 0.237586
| 6,827
| 111
| 159
| 61.504505
| 0.425744
| 0.060349
| 0
| 0
| 0
| 0
| 0.023463
| 0
| 0
| 0
| 0
| 0
| 0.870968
| 1
| 0.112903
| true
| 0
| 0.016129
| 0
| 0.129032
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
463c1b930785d4db5c238e400f6ec325ac560561
| 235
|
py
|
Python
|
need2fix/app/util.py
|
MasterTos/web_needtofix
|
6bd29d5dd6e5d033263d8b7753cd219b982c59d0
|
[
"MIT"
] | null | null | null |
need2fix/app/util.py
|
MasterTos/web_needtofix
|
6bd29d5dd6e5d033263d8b7753cd219b982c59d0
|
[
"MIT"
] | 7
|
2020-06-05T19:11:48.000Z
|
2022-03-11T23:31:15.000Z
|
need2fix/app/util.py
|
MasterTos/web_needtofix
|
6bd29d5dd6e5d033263d8b7753cd219b982c59d0
|
[
"MIT"
] | 1
|
2018-10-09T11:46:06.000Z
|
2018-10-09T11:46:06.000Z
|
def is_mechanic(user):
    """Return whether *user* belongs to the 'mechanic' auth group.

    The original returned the lazy QuerySet itself and relied on its
    truthiness; ``.exists()`` issues a cheap ``SELECT ... EXISTS`` query
    instead of evaluating the whole queryset, and callers that only test
    the result as a boolean (e.g. ``user_passes_test``) are unaffected.
    """
    return user.groups.filter(name='mechanic').exists()
def is_mechanic_above(user):
    """Return True if *user* is in the 'mechanic' group or is a superuser.

    Uses ``.exists()`` so the function consistently returns a bool; the
    original returned either a QuerySet or a bool depending on the branch
    taken by ``or``.
    """
    return user.groups.filter(name='mechanic').exists() or user.is_superuser
| 33.571429
| 68
| 0.731915
| 33
| 235
| 5.030303
| 0.333333
| 0.240964
| 0.271084
| 0.240964
| 0.457831
| 0.457831
| 0.457831
| 0
| 0
| 0
| 0
| 0
| 0.161702
| 235
| 7
| 69
| 33.571429
| 0.84264
| 0.208511
| 0
| 0
| 0
| 0
| 0.089888
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
46487744cc11ee70fa57ef65fcb2eecde558ae17
| 129
|
py
|
Python
|
server/apps/streamer/worker/misc/tests/__init__.py
|
iotile/iotile_cloud
|
9dc65ac86d3a730bba42108ed7d9bbb963d22ba6
|
[
"MIT"
] | null | null | null |
server/apps/streamer/worker/misc/tests/__init__.py
|
iotile/iotile_cloud
|
9dc65ac86d3a730bba42108ed7d9bbb963d22ba6
|
[
"MIT"
] | null | null | null |
server/apps/streamer/worker/misc/tests/__init__.py
|
iotile/iotile_cloud
|
9dc65ac86d3a730bba42108ed7d9bbb963d22ba6
|
[
"MIT"
] | null | null | null |
from .tests_adjust_timestamp import *
from .tests_adjust_timestamp_reverse import *
from .tests_forward_streamer_report import *
| 32.25
| 45
| 0.860465
| 17
| 129
| 6.058824
| 0.529412
| 0.262136
| 0.291262
| 0.466019
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093023
| 129
| 3
| 46
| 43
| 0.880342
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
464d03d9ea602e9cc934e9173f345ffd30ce4b12
| 10,529
|
py
|
Python
|
py/HW3/option_models/sabr.py
|
polarbluebear/ASP
|
79659140b161a63d7e08df6c4d9892aa83ba3070
|
[
"MIT"
] | null | null | null |
py/HW3/option_models/sabr.py
|
polarbluebear/ASP
|
79659140b161a63d7e08df6c4d9892aa83ba3070
|
[
"MIT"
] | null | null | null |
py/HW3/option_models/sabr.py
|
polarbluebear/ASP
|
79659140b161a63d7e08df6c4d9892aa83ba3070
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Tue Oct 10
@author: jaehyuk
"""
import numpy as np
import scipy.stats as ss
import scipy.optimize as sopt
import scipy.integrate as spint
import pyfeng as pf
from . import normal
from . import bsm
'''
MC model class for Beta=1
'''
class ModelBsmMC:
    """Plain Monte-Carlo SABR pricer for beta = 1 (lognormal spot dynamics)."""

    beta = 1.0  # fixed by the model variant (not used)
    vov, rho = 0.0, 0.0
    sigma, intr, divr = None, None, None
    bsm_model = None

    def __init__(self, sigma, vov=0, rho=0.0, beta=1.0, intr=0, divr=0,
                 time_steps=1_000, n_samples=10_000):
        """Store model parameters and build the helper BSM model.

        sigma: initial volatility; vov: vol-of-vol; rho: spot/vol correlation;
        intr/divr: interest and dividend rates; time_steps/n_samples: MC grid.
        """
        self.sigma = sigma
        self.vov = vov
        self.rho = rho
        self.intr = intr
        self.divr = divr
        self.time_steps = time_steps
        self.n_samples = n_samples
        self.bsm_model = pf.Bsm(sigma, intr=intr, divr=divr)

    def bsm_vol(self, strike, spot, texp=None, sigma=None):
        """Back out the implied BSM volatility from the MC price."""
        mc_price = self.price(strike, spot, texp, sigma)
        return self.bsm_model.impvol(mc_price, strike, spot, texp)

    def price(self, strike, spot, texp=None, sigma=None, cp=1,
              time_steps=1_000, n_samples=10_000):
        """Monte-Carlo price of European calls at each strike.

        The random seed is fixed, so repeated calls are reproducible.
        """
        np.random.seed(12345)
        div_fac = np.exp(-texp * self.divr)
        disc_fac = np.exp(-texp * self.intr)
        forward = spot / disc_fac * div_fac  # NOTE(review): computed but unused

        if sigma is None:
            sigma = self.sigma
        self.time_steps = time_steps  # MC time-grid size
        self.n_samples = n_samples  # MC path count

        # Correlated Gaussians: z drives the vol path, w drives the spot path.
        z = np.random.normal(size=(self.n_samples, self.time_steps))
        x = np.random.normal(size=(self.n_samples, self.time_steps))
        w = self.rho * z + np.sqrt(1 - self.rho ** 2) * x

        shape = (self.n_samples, self.time_steps + 1)
        dt = texp / self.time_steps
        log_price = np.log(spot) * np.ones(shape)  # log spot paths
        spot_path = spot * np.ones(shape)  # spot paths
        vol_path = self.sigma * np.ones(shape)  # volatility paths

        # Euler scheme in log space; the vol follows a lognormal process.
        # NOTE(review): the vol path starts from self.sigma even when a local
        # `sigma` argument was supplied -- confirm this is intended.
        for step in range(self.time_steps):
            log_price[:, step + 1] = (log_price[:, step]
                                      + vol_path[:, step] * np.sqrt(dt) * w[:, step]
                                      - 0.5 * (vol_path[:, step] ** 2) * dt)
            vol_path[:, step + 1] = vol_path[:, step] * np.exp(
                self.vov * np.sqrt(dt) * z[:, step] - 0.5 * (self.vov ** 2) * dt)
            spot_path[:, step + 1] = np.exp(log_price[:, step + 1])

        payoff = np.zeros_like(strike)
        for k in range(len(strike)):
            payoff[k] = np.mean(np.maximum(spot_path[:, -1] - strike[k], 0))
        return disc_fac * payoff
'''
MC model class for Beta=0
'''
class ModelNormalMC:
    """Plain Monte-Carlo SABR pricer for beta = 0 (normal/Bachelier dynamics)."""

    beta = 0.0  # fixed (not used)
    vov, rho = 0.0, 0.0
    sigma, intr, divr = None, None, None
    normal_model = None

    def __init__(self, sigma, vov=0, rho=0.0, beta=1.0, intr=0, divr=0, time_steps=1_000, n_samples=10_000):
        """Store model parameters and build the helper normal (Bachelier) model.

        sigma: initial volatility; vov: vol-of-vol; rho: spot/vol correlation;
        intr/divr: interest and dividend rates; time_steps/n_samples: MC grid.
        """
        self.sigma = sigma
        self.vov = vov
        self.rho = rho
        self.intr = intr
        self.divr = divr
        self.time_steps = time_steps
        self.n_samples = n_samples
        self.normal_model = pf.Norm(sigma, intr=intr, divr=divr)

    def norm_vol(self, strike, spot, texp=None, sigma=None):
        """Back out the implied normal vol from the MC price via impvol()."""
        price = self.price(strike, spot, texp, sigma)
        vol = self.normal_model.impvol(price, strike, spot, texp)
        return vol

    def price(self, strike, spot, texp=None, sigma=None, cp=1, time_steps=1_000, n_samples=10_000):
        """Monte-Carlo price of European calls at each strike.

        The random seed is fixed, so repeated calls are reproducible.
        """
        np.random.seed(12345)
        div_fac = np.exp(-texp * self.divr)
        disc_fac = np.exp(-texp * self.intr)
        forward = spot / disc_fac * div_fac  # NOTE(review): computed but unused
        if sigma is None:
            sigma = self.sigma
        self.time_steps = time_steps  # number of time steps of MC
        self.n_samples = n_samples  # number of samples of MC
        # Generate correlated normal random variables: z drives the vol path,
        # w = rho*z + sqrt(1 - rho^2)*x drives the spot path.
        z = np.random.normal(size=(self.n_samples, self.time_steps))
        x = np.random.normal(size=(self.n_samples, self.time_steps))
        w = self.rho * z + np.sqrt(1-self.rho**2) * x
        path_size = np.zeros([self.n_samples, self.time_steps + 1])  # shape template
        delta_tk = texp / self.time_steps
        sk = spot * np.ones_like(path_size)  # spot paths (arithmetic dynamics)
        sigma_tk = self.sigma * np.ones_like(path_size)  # volatility paths
        # Euler scheme: dS = sigma*dW; the vol follows a lognormal process.
        # NOTE(review): the vol path starts from self.sigma even when a local
        # `sigma` argument was supplied -- confirm this is intended.
        for i in range(self.time_steps):
            sk[:, i+1] = sk[:, i] + sigma_tk[:, i] * np.sqrt(delta_tk) * w[:, i]
            sigma_tk[:, i+1] = sigma_tk[:, i] * np.exp(self.vov * np.sqrt(delta_tk) * z[:, i] - 0.5 * (self.vov**2) * delta_tk)
        price = np.zeros_like(strike)
        for j in range(len(strike)):
            price[j] = np.mean(np.maximum(sk[:, -1] - strike[j], 0))
        return disc_fac * price
'''
Conditional MC model class for Beta=1
'''
class ModelBsmCondMC:
    """Conditional Monte-Carlo SABR pricer for beta = 1.

    Simulates only the volatility path; conditional on it, the spot is
    lognormal, so each path is priced with the closed-form BSM formula.
    """

    beta = 1.0  # fixed (not used)
    vov, rho = 0.0, 0.0
    sigma, intr, divr = None, None, None
    bsm_model = None

    def __init__(self, sigma, vov=0, rho=0.0, beta=1.0, intr=0, divr=0,
                 time_steps=1_000, n_samples=10_000):
        """Store model parameters and build the helper BSM model.

        sigma: initial volatility; vov: vol-of-vol; rho: spot/vol correlation;
        intr/divr: interest and dividend rates; time_steps/n_samples: MC grid.
        """
        self.sigma = sigma
        self.vov = vov
        self.rho = rho
        self.intr = intr
        self.divr = divr
        self.time_steps = time_steps
        self.n_samples = n_samples
        self.bsm_model = pf.Bsm(sigma, intr=intr, divr=divr)

    def bsm_vol(self, strike, spot, texp=None):
        """Back out the implied BSM volatility from the conditional-MC price.

        BUG FIX: the original passed an undefined name ``sigma`` to
        self.price(); this class's price() takes no sigma parameter, so the
        call raised NameError (or would have bound sigma to ``cp``).
        """
        price = self.price(strike, spot, texp)
        vol = self.bsm_model.impvol(price, strike, spot, texp)
        return vol

    def price(self, strike, spot, texp=None, cp=1, time_steps=1_000, n_samples=10_000):
        """Conditional-MC price of European calls at each strike.

        Only the vol path is simulated; the seed is fixed for reproducibility.
        """
        np.random.seed(12345)
        div_fac = np.exp(-texp * self.divr)
        disc_fac = np.exp(-texp * self.intr)
        forward = spot / disc_fac * div_fac  # NOTE(review): computed but unused
        self.time_steps = time_steps  # number of time steps of MC
        self.n_samples = n_samples  # number of samples of MC
        # Simulate the lognormal volatility path driven by z.
        z = np.random.normal(size=(self.n_samples, self.time_steps))
        delta_tk = texp / self.time_steps
        sigma_tk = self.sigma * np.ones([self.n_samples, self.time_steps + 1])
        for i in range(self.time_steps):
            sigma_tk[:, i+1] = sigma_tk[:, i] * np.exp(
                self.vov * np.sqrt(delta_tk) * z[:, i] - 0.5 * (self.vov**2) * delta_tk)
        # Normalized integrated variance via Simpson's rule.
        # NOTE: scipy renamed simps -> simpson; simps works on older scipy.
        int_var = spint.simps(sigma_tk * sigma_tk, dx=texp / self.time_steps) / (self.sigma**2)
        # Conditional spot and effective vol given the simulated vol path.
        spot_cond = spot * np.exp(
            self.rho * (sigma_tk[:, -1] - self.sigma) / self.vov
            - (self.rho * self.sigma)**2 * texp * int_var / 2)
        vol = self.sigma * np.sqrt((1 - self.rho**2) * int_var)
        price = np.zeros_like(strike)
        for j in range(len(strike)):
            price[j] = np.mean(bsm.price(strike[j], spot_cond, texp, vol))
        return disc_fac * price
'''
Conditional MC model class for Beta=0
'''
class ModelNormalCondMC:
    """Conditional Monte-Carlo SABR pricer for beta = 0.

    Simulates only the volatility path; conditional on it, the spot is
    normal, so each path is priced with the closed-form Bachelier formula.
    """

    beta = 0.0  # fixed (not used)
    vov, rho = 0.0, 0.0
    sigma, intr, divr = None, None, None
    normal_model = None

    def __init__(self, sigma, vov=0, rho=0.0, beta=0.0, intr=0, divr=0,
                 time_steps=1_000, n_samples=10_000):
        """Store model parameters and build the helper normal (Bachelier) model.

        sigma: initial volatility; vov: vol-of-vol; rho: spot/vol correlation;
        intr/divr: interest and dividend rates; time_steps/n_samples: MC grid.
        """
        self.sigma = sigma
        self.vov = vov
        self.rho = rho
        self.intr = intr
        self.divr = divr
        self.time_steps = time_steps
        self.n_samples = n_samples
        self.normal_model = pf.Norm(sigma, intr=intr, divr=divr)

    def norm_vol(self, strike, spot, texp=None):
        """Back out the implied normal vol from the conditional-MC price.

        BUG FIX: the original passed an undefined name ``sigma`` to
        self.price(); this class's price() takes no sigma parameter, so the
        call raised NameError (or would have bound sigma to ``cp``).
        """
        price = self.price(strike, spot, texp)
        vol = self.normal_model.impvol(price, strike, spot, texp)
        return vol

    def price(self, strike, spot, texp=None, cp=1, time_steps=1_000, n_samples=10_000):
        """Conditional-MC price of European calls at each strike.

        Only the vol path is simulated; the seed is fixed for reproducibility.
        """
        np.random.seed(12345)
        div_fac = np.exp(-texp * self.divr)
        disc_fac = np.exp(-texp * self.intr)
        forward = spot / disc_fac * div_fac  # NOTE(review): computed but unused
        self.time_steps = time_steps  # number of time steps of MC
        self.n_samples = n_samples  # number of samples of MC
        # Simulate the lognormal volatility path driven by z.
        z = np.random.normal(size=(self.n_samples, self.time_steps))
        delta_tk = texp / self.time_steps
        sigma_tk = self.sigma * np.ones([self.n_samples, self.time_steps + 1])
        for i in range(self.time_steps):
            sigma_tk[:, i+1] = sigma_tk[:, i] * np.exp(
                self.vov * np.sqrt(delta_tk) * z[:, i] - 0.5 * (self.vov**2) * delta_tk)
        # Normalized integrated variance via Simpson's rule.
        # NOTE: scipy renamed simps -> simpson; simps works on older scipy.
        int_var = spint.simps(sigma_tk * sigma_tk, dx=texp / self.time_steps) / (self.sigma**2)
        # Conditional spot and effective vol given the simulated vol path.
        spot_cond = spot + self.rho * (sigma_tk[:, -1] - self.sigma) / self.vov
        vol = self.sigma * np.sqrt((1 - self.rho**2) * int_var)
        price = np.zeros_like(strike)
        for j in range(len(strike)):
            price[j] = np.mean(normal.price(strike[j], spot_cond, texp, vol))
        return disc_fac * price
| 37.469751
| 129
| 0.572609
| 1,557
| 10,529
| 3.737315
| 0.09377
| 0.07424
| 0.062554
| 0.027496
| 0.93126
| 0.929541
| 0.92576
| 0.915106
| 0.915106
| 0.904451
| 0
| 0.027949
| 0.306772
| 10,529
| 280
| 130
| 37.603571
| 0.769283
| 0.149682
| 0
| 0.854545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.072727
| false
| 0
| 0.042424
| 0
| 0.236364
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
465c0390ac772f4d32bd91656aabda6df47c7309
| 4,809
|
py
|
Python
|
tests/draw/svg/test_visibility.py
|
rianmcguire/WeasyPrint
|
7e400663236d16121e14cf3183ce53828d056092
|
[
"BSD-3-Clause"
] | 4,512
|
2015-01-02T16:40:59.000Z
|
2022-03-31T17:26:28.000Z
|
tests/draw/svg/test_visibility.py
|
rianmcguire/WeasyPrint
|
7e400663236d16121e14cf3183ce53828d056092
|
[
"BSD-3-Clause"
] | 1,420
|
2015-01-07T21:17:01.000Z
|
2022-03-31T10:23:45.000Z
|
tests/draw/svg/test_visibility.py
|
rianmcguire/WeasyPrint
|
7e400663236d16121e14cf3183ce53828d056092
|
[
"BSD-3-Clause"
] | 640
|
2015-01-30T18:07:09.000Z
|
2022-03-24T20:17:42.000Z
|
"""
weasyprint.tests.test_draw.svg.test_visibility
----------------------------------------------
Test how the visibility is controlled with "visibility" and "display"
attributes.
"""
from ...testing_utils import assert_no_logs
from .. import assert_pixels
@assert_no_logs
def test_visibility_visible():
    # An explicitly visible <rect> must be painted (5x5 red square at 2,2).
    assert_pixels('visibility_visible', 9, 9, '''
        _________
        _________
        __RRRRR__
        __RRRRR__
        __RRRRR__
        __RRRRR__
        __RRRRR__
        _________
        _________
    ''', '''
      <style>
        @page { size: 9px }
        svg { display: block }
      </style>
      <svg width="9px" height="9px" xmlns="http://www.w3.org/2000/svg">
        <rect visibility="visible"
              x="2" y="2" width="5" height="5" fill="red" />
      </svg>
    ''')
@assert_no_logs
def test_visibility_hidden():
    # A <rect> with visibility="hidden" must paint nothing.
    assert_pixels('visibility_hidden', 9, 9, '''
        _________
        _________
        _________
        _________
        _________
        _________
        _________
        _________
        _________
    ''', '''
      <style>
        @page { size: 9px }
        svg { display: block }
      </style>
      <svg width="9px" height="9px" xmlns="http://www.w3.org/2000/svg">
        <rect visibility="hidden"
              x="2" y="2" width="5" height="5" fill="red" />
      </svg>
    ''')
@assert_no_logs
def test_visibility_inherit_hidden():
    # visibility="hidden" on a <g> is inherited: the child rect is not painted.
    assert_pixels('visibility_inherit_hidden', 9, 9, '''
        _________
        _________
        _________
        _________
        _________
        _________
        _________
        _________
        _________
    ''', '''
      <style>
        @page { size: 9px }
        svg { display: block }
      </style>
      <svg width="9px" height="9px" xmlns="http://www.w3.org/2000/svg">
        <g visibility="hidden">
          <rect x="2" y="2" width="5" height="5" fill="red" />
        </g>
      </svg>
    ''')
@assert_no_logs
def test_visibility_inherit_visible():
    # Unlike display, visibility can be re-enabled by a child: a visible rect
    # inside a hidden <g> is painted.
    assert_pixels('visibility_inherit_visible', 9, 9, '''
        _________
        _________
        __RRRRR__
        __RRRRR__
        __RRRRR__
        __RRRRR__
        __RRRRR__
        _________
        _________
    ''', '''
      <style>
        @page { size: 9px }
        svg { display: block }
      </style>
      <svg width="9px" height="9px" xmlns="http://www.w3.org/2000/svg">
        <g visibility="hidden">
          <rect visibility="visible"
                x="2" y="2" width="5" height="5" fill="red" />
        </g>
      </svg>
    ''')
@assert_no_logs
def test_display_inline():
    # display="inline" (the default) keeps the rect rendered.
    assert_pixels('display_inline', 9, 9, '''
        _________
        _________
        __RRRRR__
        __RRRRR__
        __RRRRR__
        __RRRRR__
        __RRRRR__
        _________
        _________
    ''', '''
      <style>
        @page { size: 9px }
        svg { display: block }
      </style>
      <svg width="9px" height="9px" xmlns="http://www.w3.org/2000/svg">
        <rect display="inline"
              x="2" y="2" width="5" height="5" fill="red" />
      </svg>
    ''')
@assert_no_logs
def test_display_none():
    # display="none" removes the rect entirely: nothing is painted.
    assert_pixels('display_none', 9, 9, '''
        _________
        _________
        _________
        _________
        _________
        _________
        _________
        _________
        _________
    ''', '''
      <style>
        @page { size: 9px }
        svg { display: block }
      </style>
      <svg width="9px" height="9px" xmlns="http://www.w3.org/2000/svg">
        <rect display="none"
              x="2" y="2" width="5" height="5" fill="red" />
      </svg>
    ''')
@assert_no_logs
def test_display_inherit_none():
    # display="none" on a <g> removes the whole subtree: the child rect
    # is not painted.
    assert_pixels('display_inherit_none', 9, 9, '''
        _________
        _________
        _________
        _________
        _________
        _________
        _________
        _________
        _________
    ''', '''
      <style>
        @page { size: 9px }
        svg { display: block }
      </style>
      <svg width="9px" height="9px" xmlns="http://www.w3.org/2000/svg">
        <g display="none">
          <rect x="2" y="2" width="5" height="5" fill="red" />
        </g>
      </svg>
    ''')
@assert_no_logs
def test_display_inherit_inline():
    # Unlike visibility, display cannot be re-enabled by a child: a rect with
    # display="inline" inside a display="none" <g> is still not painted.
    assert_pixels('display_inherit_inline', 9, 9, '''
        _________
        _________
        _________
        _________
        _________
        _________
        _________
        _________
        _________
    ''', '''
      <style>
        @page { size: 9px }
        svg { display: block }
      </style>
      <svg width="9px" height="9px" xmlns="http://www.w3.org/2000/svg">
        <g display="none">
          <rect display="inline"
                x="2" y="2" width="5" height="5" fill="red" />
        </g>
      </svg>
    ''')
| 22.9
| 73
| 0.510917
| 440
| 4,809
| 4.140909
| 0.115909
| 0.065862
| 0.059276
| 0.065862
| 0.768386
| 0.768386
| 0.75247
| 0.75247
| 0.731614
| 0.731614
| 0
| 0.035298
| 0.340195
| 4,809
| 209
| 74
| 23.009569
| 0.538922
| 0.036598
| 0
| 0.891304
| 0
| 0.086957
| 0.820095
| 0.015842
| 0
| 0
| 0
| 0
| 0.097826
| 1
| 0.043478
| true
| 0
| 0.01087
| 0
| 0.054348
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
d3bbfbaef1daa29b6267e76c7cb0626134052c65
| 104
|
py
|
Python
|
src/models/knn.py
|
GavinNishizawa/ncaa-march-madness-2018
|
8324e48ba32c685d60a4eb97e0f10f664a88710b
|
[
"MIT"
] | 1
|
2018-03-08T23:44:18.000Z
|
2018-03-08T23:44:18.000Z
|
src/models/knn.py
|
GavinNishizawa/ncaa-march-madness-2018
|
8324e48ba32c685d60a4eb97e0f10f664a88710b
|
[
"MIT"
] | null | null | null |
src/models/knn.py
|
GavinNishizawa/ncaa-march-madness-2018
|
8324e48ba32c685d60a4eb97e0f10f664a88710b
|
[
"MIT"
] | null | null | null |
from sklearn import neighbors
def create(n_neighbors=5):
    """Build a k-nearest-neighbors classifier.

    Args:
        n_neighbors: number of neighbors used in the vote. Defaults to 5,
            matching the previously hard-coded value, so existing callers
            are unaffected.

    Returns:
        An unfitted ``sklearn.neighbors.KNeighborsClassifier``.
    """
    return neighbors.KNeighborsClassifier(n_neighbors=n_neighbors)
| 14.857143
| 56
| 0.788462
| 12
| 104
| 6.75
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011236
| 0.144231
| 104
| 6
| 57
| 17.333333
| 0.898876
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
314220d95bdd19dcffb8589ceab831503ca946bf
| 13,672
|
py
|
Python
|
Part-03-Understanding-Software-Crafting-Your-Own-Tools/models/edx-platform/cms/djangoapps/models/settings/tests/test_settings.py
|
osoco/better-ways-of-thinking-about-software
|
83e70d23c873509e22362a09a10d3510e10f6992
|
[
"MIT"
] | 3
|
2021-12-15T04:58:18.000Z
|
2022-02-06T12:15:37.000Z
|
Part-03-Understanding-Software-Crafting-Your-Own-Tools/models/edx-platform/cms/djangoapps/models/settings/tests/test_settings.py
|
osoco/better-ways-of-thinking-about-software
|
83e70d23c873509e22362a09a10d3510e10f6992
|
[
"MIT"
] | null | null | null |
Part-03-Understanding-Software-Crafting-Your-Own-Tools/models/edx-platform/cms/djangoapps/models/settings/tests/test_settings.py
|
osoco/better-ways-of-thinking-about-software
|
83e70d23c873509e22362a09a10d3510e10f6992
|
[
"MIT"
] | 1
|
2019-01-02T14:38:50.000Z
|
2019-01-02T14:38:50.000Z
|
"""
Tests for the advanced settings
"""
import unittest
import ddt
from cms.djangoapps.models.settings.course_metadata import CourseMetadata
# The eleven fixture dicts below used to be written out as full literals,
# repeating the same five-topic block ten times. They are now built from two
# small helpers; each resulting dict is value-equal to the original literal.


def _default_topics():
    """Return a fresh copy of the five standard team-set topic dicts."""
    return [
        {
            "max_team_size": 5,
            "name": "Topic 1 Name",
            "id": "topic_1_id",
            "description": "Topic 1 desc",
            "type": "public_managed",
        },
        {
            "id": "topic_2_id",
            "name": "Topic 2 Name",
            "description": "Topic 2 desc",
        },
        {
            "id": "topic_3_id",
            "name": "Topic 3 Name",
            "description": "Topic 3 desc",
        },
        {
            "id": "private_topic_1_id",
            "type": "private_managed",
            "description": "Private Topic 1 desc",
            "name": "Private Topic 1 Name",
        },
        {
            "id": "private_topic_2_id",
            "type": "private_managed",
            "description": "Private Topic 2 desc",
            "name": "Private Topic 2 Name",
        },
    ]


def _topics_with(overrides):
    """Return the default topics with per-index field overrides applied.

    overrides: mapping of topic index -> dict of fields to merge into that
    topic (new keys are added, existing keys replaced).
    """
    topics = _default_topics()
    for index, fields in overrides.items():
        topics[index].update(fields)
    return topics


def _config(topics, max_team_size=None, topics_key="topics"):
    """Wrap *topics* in the teams_configuration advanced-setting envelope.

    max_team_size: course-level cap; omitted from the dict when None.
    topics_key: key holding the topic list ("topics" or "team_sets").
    """
    value = {topics_key: topics}
    if max_team_size is not None:
        value["max_team_size"] = max_team_size
    return {"teams_configuration": {"value": value}}


# Valid configuration: no validation errors expected.
working_config_block = _config(_default_topics(), max_team_size=4)

# Course-level max_team_size is negative.
config_block_negative_team_size = _config(_default_topics(), max_team_size=-1)

# A single topic's max_team_size is negative.
config_block_negative_local_team_size = _config(
    _topics_with({0: {"max_team_size": -4}}), max_team_size=2)

# Two topics share the id "topic_1_id".
config_block_duplicate_id = _config(
    _topics_with({0: {"max_team_size": 4}, 1: {"id": "topic_1_id"}}),
    max_team_size=2)

# Both a negative topic max_team_size and a duplicated "topic_2_id".
config_block_negative_team_size_dupe_id = _config(
    _topics_with({0: {"max_team_size": -4}, 2: {"id": "topic_2_id"}}),
    max_team_size=2)

# First topic has an empty name.
config_block_missing_name = _config(
    _topics_with({0: {"max_team_size": 4, "name": ""}}), max_team_size=2)

# First topic carries an unrecognized extra key "foo".
config_block_extra_attribute = _config(
    _topics_with({0: {"max_team_size": 4, "name": "Topic 1 name",
                      "foo": "bar"}}),
    max_team_size=2)

# Topics stored under "team_sets" with an invalid teamset type "foo".
config_block_unrecognized_teamset_type = _config(
    _topics_with({0: {"max_team_size": 4, "name": "Topic 1 name",
                      "type": "foo"}}),
    max_team_size=2, topics_key="team_sets")

# No course-level max_team_size at all: still valid.
config_block_no_global_max_team_size = _config(_default_topics())

# Course-level max_team_size above the 500 cap.
config_block_course_max_team_size = _config(
    [
        {
            "max_team_size": 500,
            "name": "Topic 1 Name",
            "id": "topic_1_id",
            "description": "Topic 1 desc",
            "type": "public_managed",
        },
    ],
    max_team_size=501)

# Topic-level max_team_size above the 500 cap.
config_block_teamset_max_team_size = _config(
    [
        {
            "max_team_size": 501,
            "name": "Topic 1 Name",
            "id": "topic_1_id",
            "description": "Topic 1 desc",
            "type": "public_managed",
        },
    ],
    max_team_size=500)
@ddt.ddt
class TeamsConfigurationTests(unittest.TestCase):
    """Validation tests for the teams_configuration advanced setting."""

    @ddt.data(
        (working_config_block, set()),
        (config_block_negative_team_size, {'max_team_size must be greater than zero'}),
        (config_block_negative_local_team_size, {'max_team_size must be greater than zero'}),
        (config_block_duplicate_id, {'duplicate ids: topic_1_id'}),
        (
            config_block_negative_team_size_dupe_id,
            {'duplicate ids: topic_2_id', 'max_team_size must be greater than zero'}
        ),
        (config_block_missing_name, {'name attribute must not be empty'}),
        (config_block_extra_attribute, {'extra keys: foo'}),
        (config_block_unrecognized_teamset_type, {'type foo is invalid'}),
        (config_block_no_global_max_team_size, set()),
        (config_block_course_max_team_size, {'max_team_size cannot be greater than 500'}),
        (config_block_teamset_max_team_size, {'max_team_size cannot be greater than 500'})
    )
    @ddt.unpack
    def test_team_settings(self, config_block, error_message):
        """Each config block must yield exactly the expected set of errors."""
        errors = CourseMetadata.validate_team_settings(config_block)
        self.assertEqual(len(errors), len(error_message))
        # Vacuous when no errors are expected (empty loop).
        for error in errors:
            self.assertIn(error['message'], error_message)
| 32.093897
| 93
| 0.377633
| 1,084
| 13,672
| 4.476015
| 0.081181
| 0.075433
| 0.072547
| 0.046991
| 0.855317
| 0.839654
| 0.81183
| 0.786892
| 0.778236
| 0.759687
| 0
| 0.026539
| 0.509435
| 13,672
| 425
| 94
| 32.169412
| 0.696884
| 0.005339
| 0
| 0.544776
| 0
| 0
| 0.315115
| 0
| 0
| 0
| 0
| 0
| 0.004975
| 1
| 0.002488
| false
| 0
| 0.007463
| 0
| 0.012438
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9ee9b47874eb5f46c6a489afbaaad64f405b28b1
| 15,409
|
py
|
Python
|
src/genie/libs/parser/viptela/tests/ShowControlConnectionHistory/cli/equal/golden_output1_expected.py
|
nielsvanhooy/genieparser
|
9a1955749697a6777ca614f0af4d5f3a2c254ccd
|
[
"Apache-2.0"
] | null | null | null |
src/genie/libs/parser/viptela/tests/ShowControlConnectionHistory/cli/equal/golden_output1_expected.py
|
nielsvanhooy/genieparser
|
9a1955749697a6777ca614f0af4d5f3a2c254ccd
|
[
"Apache-2.0"
] | null | null | null |
src/genie/libs/parser/viptela/tests/ShowControlConnectionHistory/cli/equal/golden_output1_expected.py
|
nielsvanhooy/genieparser
|
9a1955749697a6777ca614f0af4d5f3a2c254ccd
|
[
"Apache-2.0"
] | null | null | null |
def _control_event(domain_id, peer_ip, peer_port, system_ip, site_id,
                   local_error, repeat_count, state):
    """Build one control-connection downtime record for a peer.

    All values are strings, mirroring the parsed CLI output; the private
    and public ip/port pairs are identical in this capture.
    """
    return {
        "domain_id": domain_id,
        "local_color": "gold",
        "local_error": local_error,
        "peer_organization": "",
        "peer_private_ip": peer_ip,
        "peer_private_port": peer_port,
        "peer_protocol": "dtls",
        "peer_public_ip": peer_ip,
        "peer_public_port": peer_port,
        "peer_system_ip": system_ip,
        "remote_error": "NOERR",
        "repeat_count": repeat_count,
        "site_id": site_id,
        "state": state,
    }


# (local_error, repeat_count, state) keyed by downtime timestamp.
_VBOND_EVENTS = {
    "2021-12-15T04:19:41+0000": ("DCONFAIL", "1", "connect"),
    "2021-12-16T17:40:20+0000": ("DISTLOC", "4", "tear_down"),
    "2021-12-16T19:28:22+0000": ("DISTLOC", "7", "tear_down"),
    "2021-12-17T04:55:11+0000": ("DISTLOC", "0", "tear_down"),
    "2021-12-17T04:57:19+0000": ("DISTLOC", "2", "tear_down"),
    "2021-12-17T14:36:12+0000": ("DISTLOC", "1", "tear_down"),
    "2021-12-21T06:50:19+0000": ("DCONFAIL", "3", "connect"),
    "2021-12-21T06:54:07+0000": ("DISTLOC", "13", "tear_down"),
    "2021-12-21T15:05:22+0000": ("DISTLOC", "4", "tear_down"),
    "2022-01-19T06:18:27+0000": ("DISTLOC", "2", "tear_down"),
    "2022-01-19T06:18:57+0000": ("DCONFAIL", "0", "connect"),
}

# vmanage and vsmart went down at the same instants with the same error,
# repeat count and state; they differ only in peer addressing/domain.
_CONTROLLER_EVENTS = {
    "2021-12-16T19:28:22+0000": ("DISTLOC", "7", "tear_down"),
    "2021-12-17T04:57:19+0000": ("DISTLOC", "2", "tear_down"),
    "2021-12-21T06:54:07+0000": ("DISTLOC", "13", "tear_down"),
    "2021-12-21T15:05:22+0000": ("DISTLOC", "4", "tear_down"),
    "2022-01-19T06:18:27+0000": ("DISTLOC", "2", "tear_down"),
}

expected_output = {
    "peer_type": {
        "vbond": {
            "downtime": {
                ts: _control_event("0", "184.118.1.19", "12346",
                                   "0.0.0.0", "0", *ev)
                for ts, ev in _VBOND_EVENTS.items()
            },
        },
        "vmanage": {
            "downtime": {
                ts: _control_event("0", "184.118.1.31", "12746",
                                   "10.0.0.2", "100", *ev)
                for ts, ev in _CONTROLLER_EVENTS.items()
            },
        },
        "vsmart": {
            "downtime": {
                ts: _control_event("1", "184.118.1.21", "12346",
                                   "10.0.0.3", "100", *ev)
                for ts, ev in _CONTROLLER_EVENTS.items()
            },
        },
    },
}
| 43.651558
| 55
| 0.355117
| 1,288
| 15,409
| 3.939441
| 0.059783
| 0.016949
| 0.06622
| 0.074497
| 0.982263
| 0.980883
| 0.979898
| 0.973591
| 0.967678
| 0.955853
| 0
| 0.148812
| 0.500227
| 15,409
| 352
| 56
| 43.775568
| 0.510064
| 0
| 0
| 0.883523
| 0
| 0
| 0.381949
| 0.033473
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
9ef05d34d8c5c9dfe6892d752196da8dd8d014b6
| 5,002
|
py
|
Python
|
api/tacticalrmm/accounts/migrations/0028_auto_20211010_0249.py
|
lcsnetworks/tacticalrmm
|
c9135f157394f51dd6ca3d43b18fa3ea0afea65b
|
[
"MIT"
] | null | null | null |
api/tacticalrmm/accounts/migrations/0028_auto_20211010_0249.py
|
lcsnetworks/tacticalrmm
|
c9135f157394f51dd6ca3d43b18fa3ea0afea65b
|
[
"MIT"
] | null | null | null |
api/tacticalrmm/accounts/migrations/0028_auto_20211010_0249.py
|
lcsnetworks/tacticalrmm
|
c9135f157394f51dd6ca3d43b18fa3ea0afea65b
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.2.6 on 2021-10-10 02:49
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add Role list/ping/recover permission flags and client/site scoping,
    and relax audit columns to nullable CharFields."""

    dependencies = [
        ('clients', '0018_auto_20211010_0249'),
        ('accounts', '0027_auto_20210903_0054'),
    ]

    # Identical operation sequence to the auto-generated form, built from
    # inline tables: 18 boolean flags, 2 M2M scoping fields, 6 audit-column
    # alterations, and the nullable Role FK on User.
    operations = [
        migrations.AddField(
            model_name='role',
            name=perm,
            field=models.BooleanField(default=False),
        )
        for perm in (
            'can_list_accounts',
            'can_list_agent_history',
            'can_list_agents',
            'can_list_alerts',
            'can_list_api_keys',
            'can_list_automation_policies',
            'can_list_autotasks',
            'can_list_checks',
            'can_list_clients',
            'can_list_deployments',
            'can_list_notes',
            'can_list_pendingactions',
            'can_list_roles',
            'can_list_scripts',
            'can_list_sites',
            'can_list_software',
            'can_ping_agents',
            'can_recover_agents',
        )
    ] + [
        migrations.AddField(
            model_name='role',
            name='can_view_clients',
            field=models.ManyToManyField(blank=True, related_name='role_clients', to='clients.Client'),
        ),
        migrations.AddField(
            model_name='role',
            name='can_view_sites',
            field=models.ManyToManyField(blank=True, related_name='role_sites', to='clients.Site'),
        ),
    ] + [
        migrations.AlterField(
            model_name=audited_model,
            name=audit_col,
            field=models.CharField(blank=True, max_length=255, null=True),
        )
        for audited_model, audit_col in (
            ('apikey', 'created_by'),
            ('apikey', 'modified_by'),
            ('role', 'created_by'),
            ('role', 'modified_by'),
            ('user', 'created_by'),
            ('user', 'modified_by'),
        )
    ] + [
        migrations.AlterField(
            model_name='user',
            name='role',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='users', to='accounts.role'),
        ),
    ]
| 33.125828
| 147
| 0.558776
| 472
| 5,002
| 5.720339
| 0.17161
| 0.09
| 0.105926
| 0.138519
| 0.812593
| 0.812593
| 0.797407
| 0.797407
| 0.727778
| 0.727778
| 0
| 0.019248
| 0.32487
| 5,002
| 150
| 148
| 33.346667
| 0.780278
| 0.008996
| 0
| 0.770833
| 1
| 0
| 0.131181
| 0.024016
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.013889
| 0
| 0.034722
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
732564f36e4331bcab31b687da528047c79c9f94
| 26,020
|
py
|
Python
|
python/avi/sdk/saml_avi_api.py
|
aaronjwood/alb-sdk
|
ae4c47b2228651d3f5095e7c14f081aa4adbb732
|
[
"Apache-2.0"
] | 12
|
2021-04-02T15:59:56.000Z
|
2022-01-17T06:31:25.000Z
|
python/avi/sdk/saml_avi_api.py
|
aaronjwood/alb-sdk
|
ae4c47b2228651d3f5095e7c14f081aa4adbb732
|
[
"Apache-2.0"
] | 85
|
2021-04-30T10:21:32.000Z
|
2022-03-31T06:54:37.000Z
|
python/avi/sdk/saml_avi_api.py
|
aaronjwood/alb-sdk
|
ae4c47b2228651d3f5095e7c14f081aa4adbb732
|
[
"Apache-2.0"
] | 10
|
2021-04-03T16:03:57.000Z
|
2022-03-14T23:16:47.000Z
|
# Copyright 2021 VMware, Inc.
# SPDX-License-Identifier: Apache License 2.0
from avi.sdk.avi_api import ApiSession, \
sessionDict, APIError
import json
import logging
import re
import time
import urllib
import urllib.parse
from datetime import datetime
from ssl import SSLError

import requests
from requests import ConnectionError
from requests.exceptions import ChunkedEncodingError
logger = logging.getLogger(__name__)
class OneloginSAMLApiSession(ApiSession):
    """
    Extends the ApiSession class to override authentication
    method and provide helper utilities to work with Avi
    Controller and IDPs like onelogin, okta, etc.
    """
    SAML_URL_SUFFIX = "/sso/login"
    # Request RegX
    saml_request_regex = r'<input type=\"hidden\" ' \
                         r'name=\"SAMLRequest\" value=\"(.*?)\"'
    request_relay_state_regex = r'<input type=\"hidden\" ' \
                                r'name=\"RelayState\" value=\"(.*?)\"'
    request_assertion_url_regex = r'<form method=\"post\" action=\"(.*?)\">'
    # Response RegX
    saml_response_regex = r'<input type=\"hidden\" ' \
                          r'name=\"SAMLResponse\" value=\"(.*?)\"'
    response_relay_state_regex = r'<input type=\"hidden\" ' \
                                 r'name=\"RelayState\" value=\"(.*?)\"'
    response_assertion_url_regex = r'<form method=\"post\" ' \
                                   r'action=\"(.*?)\">'

    def __init__(self, controller=None, username=None, password=None,
                 token=None, tenant=None, tenant_uuid=None, verify=False,
                 port=None, timeout=60, api_version=None,
                 retry_conxn_errors=True, data_log=False,
                 avi_credentials=None, session_id=None, csrftoken=None,
                 lazy_authentication=False, max_api_retries=None,
                 idp_cookies=None, user_hdrs=None):
        """
        This extends ApiSession class and overrides authentication method
        for SAML authentication.
        :param controller: Controller IP
        :param username: IDP username
        :param password: IDP password
        :param token: Controller token
        :param tenant: Overrides the tenant used during session creation
        :param tenant_uuid: Overrides the tenant or tenant_uuid during session
            creation
        :param verify: Boolean flag for SSL verification of url
        :param port: Controller SSO port
        :param timeout: Timeout for API calls; Default value is 60 seconds
        :param api_version: Overrides x-avi-header in request header during
            session creation
        :param retry_conxn_errors: Retry on connection errors
        :param data_log: Data log
        :param avi_credentials: avi_credential object
        :param session_id: Session ID
        :param csrftoken: CSRF Token
        :param lazy_authentication: Lazy_authentication for controller.
        :param max_api_retries: Maximum API retries
        :param idp_cookies: IDP cookies if want to use existing IDP session
        :param user_hdrs: Extra headers forwarded to the parent session
        """
        # Saved for saml_assertion(): allows reuse of an existing,
        # already-authenticated IDP browser session.
        self.idp_cookies = idp_cookies
        super(OneloginSAMLApiSession, self).__init__(
            controller, username, password, token,
            tenant, tenant_uuid, verify,
            port, timeout, api_version,
            retry_conxn_errors, data_log,
            avi_credentials, session_id, csrftoken,
            lazy_authentication, max_api_retries, user_hdrs)
        return

    def saml_assertion(self, username, password):
        """
        Perform SAML request from controller to IDPs.
        Establish session with controller and IDP.
        Assert SAML request into the request.
        Get the controller session and IDP session.
        :param username: IDP Username
        :param password: IDP Password
        :return: controller session and IDP response
        :raises APIError: on any non-success HTTP status, or when the
            SAML handshake with the IDP cannot be completed.
        """
        # Getting controller session
        controller_session = requests.Session()
        controller_session.verify = False
        saml_controller_url = self.prefix + self.SAML_URL_SUFFIX
        logger.info("Getting SAML request from url: %s", saml_controller_url)
        resp = controller_session.get(saml_controller_url,
                                      allow_redirects=True)
        if resp.status_code != 200:
            logger.error('Status Code %s msg %s' % (
                resp.status_code, resp.text))
            raise APIError('Status Code %s msg %s' % (
                resp.status_code, resp.text), resp)
        # Getting IDP session
        idp_session = requests.Session()
        saml_request_match = re.search(
            OneloginSAMLApiSession.saml_request_regex, resp.text,
            re.M | re.S)
        if not saml_request_match:
            logger.error("SAML request not generated by controller.")
            raise APIError("SAML request not generated by controller.")
        saml_request = saml_request_match.group(1)
        relay_state = re.search(
            OneloginSAMLApiSession.request_relay_state_regex, resp.text,
            re.M | re.S).group(1)
        assertion_url = re.search(
            OneloginSAMLApiSession.request_assertion_url_regex, resp.text,
            re.M | re.S).group(1)
        headers = {'Content-Type': 'application/x-www-form-urlencoded'}
        # BUG FIX: urllib.urlencode is Python 2 only; urllib.parse.urlencode
        # is the Python 3 spelling already used by OktaSAMLApiSession in
        # this module.
        saml_data = urllib.parse.urlencode({
            'SAMLRequest': saml_request,
            'RelayState': relay_state})
        if self.idp_cookies:
            logger.info("Controller url %s generated SAML request is being "
                        "sent to IDP with existing IDP cookies.",
                        saml_controller_url)
            idp_resp = idp_session.post(assertion_url, headers=headers,
                                        data=saml_data, allow_redirects=False,
                                        cookies=self.idp_cookies)
        else:
            logger.info("Controller url %s generated SAML request is being "
                        "sent to IDP.", saml_controller_url)
            idp_resp = idp_session.post(assertion_url, headers=headers,
                                        data=saml_data,
                                        allow_redirects=False)
        # BUG FIX: this check previously inspected `resp` (the controller
        # response, already verified to be 200 above) and therefore could
        # never fire; it must inspect the IDP response.
        if idp_resp.status_code not in (200, 301, 302):
            logger.error('Status Code %s msg %s' % (
                idp_resp.status_code, idp_resp.text))
            raise APIError('Status Code %s msg %s' % (
                idp_resp.status_code, idp_resp.text), idp_resp)
        if "SAMLResponse" not in idp_resp.text:
            redirect_url = idp_resp.headers['Location']
            idp_resp = idp_session.get(redirect_url,
                                       allow_redirects=False)
            # BUG FIX: same dead check as above — inspect the IDP response.
            if idp_resp.status_code not in (200, 301, 302):
                logger.error('Status Code %s msg %s' % (
                    idp_resp.status_code, idp_resp.text))
                raise APIError('Status Code %s msg %s' % (
                    idp_resp.status_code, idp_resp.text), idp_resp)
            query_string = idp_resp.headers['Location'].split('=')[1]
            data = {"return": query_string}
            json_data = json.dumps(data)
            headers = {'content-type': 'application/json'}
            # BUG FIX: `urlparse.urlparse` referenced the Python 2 module
            # whose import is commented out at the top of this file, so this
            # line raised NameError at runtime; urllib.parse is the Python 3
            # equivalent.
            parsed_uri = urllib.parse.urlparse(assertion_url)
            # This needs to be modified for other IDPs.
            auth_url = "{}://{}/access/auth".format(parsed_uri.scheme,
                                                    parsed_uri.netloc)
            resp = idp_session.post(auth_url, headers=headers,
                                    data=json_data)
            if resp.status_code in [401, 403]:
                logger.error('Status Code %s msg Invalid SAML credentials %s'
                             % (resp.status_code, resp.text))
                raise APIError('Status Code %s msg Invalid SAML credentials %s'
                               % (resp.status_code, resp.text), resp)
            elif resp.status_code != 200:
                logger.error('Status Code %s msg %s' % (
                    resp.status_code, resp.text))
                raise APIError('Status Code %s msg %s' % (
                    resp.status_code, resp.text), resp)
            # credentials payload for given IDP
            credentials_tuple = [('username', 'login',
                                  username),
                                 ('password', 'password',
                                  password)]
            for state in credentials_tuple:
                # NOTE(review): token extraction assumes the previous auth
                # response body embeds `jwt":"<token>"` — fragile; confirm
                # against the onelogin API response format.
                bearer = "Bearer " + resp.text.split('jwt":"')[1][:-3]
                headers = {'content-type': 'application/json',
                           'authorization': bearer}
                user_data = {'state': state[0],
                             'payload': {state[1]: state[2]}}
                json_data = json.dumps(user_data)
                resp = idp_session.put(auth_url, headers=headers,
                                       data=json_data)
                if resp.status_code in [401, 403]:
                    logger.error('Status Code %s msg Invalid SAML credentials %s'
                                 % (resp.status_code, resp.text))
                    raise APIError('Status Code %s msg Invalid SAML credentials %s'
                                   % (resp.status_code, resp.text), resp)
                elif resp.status_code != 200:
                    logger.error('Status Code %s msg %s' % (
                        resp.status_code, resp.text))
                    raise APIError('Status Code %s msg %s' % (
                        resp.status_code, resp.text), resp)
            data = json.loads(resp.text)
            try:
                token = data["request"]["params"]["saml_request_params_token"]
            except KeyError:
                raise APIError("Couldn't complete "
                               "authentication with IDP")
            url = data["request"]["uri"]
            params = {'saml_request_params_token': token}
            resp = idp_session.get(url, params=params)
            if resp.status_code != 200:
                logger.error('Status Code %s msg %s' % (
                    resp.status_code, resp.text))
                raise APIError('Status Code %s msg %s' % (
                    resp.status_code, resp.text), resp)
        # NOTE(review): if the IDP already returned a SAMLResponse (the
        # idp_cookies fast path), `resp` here still holds the controller's
        # initial page rather than the IDP response — confirm intended.
        return controller_session, resp

    def authenticate_session(self):
        """
        Performs SAML authentication with Avi controller and IDPs.
        Stores session cookies and sets header parameters.
        :raises APIError: when login keeps failing after
            ``max_session_retries`` attempts.
        """
        username = self.avi_credentials.username
        if self.avi_credentials.password:
            password = self.avi_credentials.password
        else:
            raise APIError("No user password provided")
        logger.debug('authenticating user %s prefix %s',
                     self.avi_credentials.username, self.prefix)
        self.cookies.clear()
        try:
            # Assert SAML response
            controller_session, resp = self.saml_assertion(username, password)
            content = resp.text
            saml_response_match = re.search(
                OneloginSAMLApiSession.saml_response_regex, content,
                re.M | re.S)
            saml_response = saml_response_match.group(1)
            relay_state = re.search(
                OneloginSAMLApiSession.response_relay_state_regex, content,
                re.M | re.S).group(1)
            assertion_url = re.search(
                OneloginSAMLApiSession.response_assertion_url_regex, content,
                re.M | re.S).group(1)
            # BUG FIX: Python 3 spelling of urlencode (was urllib.urlencode).
            saml_data = urllib.parse.urlencode([
                ('SAMLResponse', saml_response),
                ('RelayState', relay_state)])
            headers = {'Content-Type': 'application/x-www-form-urlencoded'}
            rsp = controller_session.post(assertion_url,
                                          headers=headers,
                                          data=saml_data)
            if rsp.status_code == 200:
                self.num_session_retries = 0
                self.remote_api_version = \
                    rsp.headers.get('AVI_API_VERSION', {})
                self.headers.update(self.user_hdrs)
                if rsp.cookies and 'csrftoken' in rsp.cookies:
                    sessionDict[self.key] = {
                        'csrftoken': rsp.cookies['csrftoken'],
                        'session_id': rsp.cookies['sessionid'],
                        'last_used': datetime.utcnow(),
                        'api': self,
                        'connected': True
                    }
                logger.debug("authentication success for user %s",
                             self.avi_credentials.username)
                return
            # Check for bad request and invalid credentials response code
            elif rsp.status_code in [401, 403]:
                logger.error('Status Code %s msg %s' % (
                    rsp.status_code, rsp.text))
                err = APIError('Status Code %s msg %s' % (
                    rsp.status_code, rsp.text), rsp)
            else:
                logger.error("Error status code %s msg %s", rsp.status_code,
                             rsp.text)
                err = APIError('Status Code %s msg %s' % (
                    rsp.status_code, rsp.text), rsp)
        except (ConnectionError, SSLError, ChunkedEncodingError) as e:
            if not self.retry_conxn_errors:
                raise
            logger.warning('Connection error retrying %s', e)
            err = e
        # comes here only if there was either exception or login was not
        # successful
        if self.retry_wait_time:
            time.sleep(self.retry_wait_time)
        self.num_session_retries += 1
        if self.num_session_retries > self.max_session_retries:
            self.num_session_retries = 0
            logger.error("Giving up after %d retries connection failure %s" % (
                self.max_session_retries, True))
            raise err
        self.authenticate_session()
        return
class OktaSAMLApiSession(ApiSession):
"""
Extends the ApiSession session class to provide helper
utilities to work with Avi Controller and IDP for SAML assertion and
authentication, api massaging, etc
"""
SAML_URL_SUFFIX = "/sso/login"
# Request RegX
saml_request_regex = r'<input type=\"hidden\" ' \
r'name=\"SAMLRequest\" value=\"(.*?)\"'
request_relay_state_regex = r'<input type=\"hidden\" ' \
r'name=\"RelayState\" value=\"(.*?)\"'
request_assertion_url_regex = r'<form method=\"post\" action=\"(.*?)\">'
# Response RegX
saml_response_regex = r'<input name=\"SAMLResponse\" ' \
r'type=\"hidden\" value=\"(.*?)\"'
response_relay_state_regex = r'<input name=\"RelayState\" ' \
r'type=\"hidden\" value=\"(.*?)\"'
response_assertion_url_regex = r'<form id=\"appForm\" ' \
r'action=\"(.*?)\" method=\"post\">'
    def __init__(self, controller=None, username=None, password=None,
                 token=None, tenant=None, tenant_uuid=None, verify=False,
                 port=None, timeout=60, api_version=None,
                 retry_conxn_errors=True, data_log=False,
                 avi_credentials=None, session_id=None, csrftoken=None,
                 lazy_authentication=False, max_api_retries=None,
                 idp_cookies=None):
        """
        This extends ApiSession class and overrides authentication method
        for SAML authentication against an Okta IDP.
        :param controller: Controller IP
        :param username: IDP username
        :param password: IDP password
        :param token: Controller token
        :param tenant: Overrides the tenant used during session creation
        :param tenant_uuid: Overrides the tenant or tenant_uuid during session
            creation
        :param verify: Boolean flag for SSL verification of url
        :param port: Controller SSO port
        :param timeout: Timeout for API calls; Default value is 60 seconds
        :param api_version: Overrides x-avi-header in request header during
            session creation
        :param retry_conxn_errors: Retry on connection errors
        :param data_log: Data log
        :param avi_credentials: avi_credential object
        :param session_id: Session ID
        :param csrftoken: CSRF Token
        :param lazy_authentication: Lazy_authentication for controller.
        :param max_api_retries: Maximum API retries
        :param idp_cookies: IDP cookies if want to use existing IDP session
        """
        # Stored for saml_assertion(): lets callers reuse an existing,
        # already-authenticated Okta session instead of logging in again.
        self.idp_cookies = idp_cookies
        # NOTE(review): unlike OneloginSAMLApiSession.__init__, this variant
        # accepts no ``user_hdrs`` argument and does not forward one to the
        # parent class — confirm whether that asymmetry is intentional.
        super(OktaSAMLApiSession, self).__init__(
            controller, username, password, token,
            tenant, tenant_uuid, verify,
            port, timeout, api_version,
            retry_conxn_errors, data_log,
            avi_credentials, session_id, csrftoken,
            lazy_authentication, max_api_retries)
        return
    def saml_assertion(self, username, password):
        """
        Perform SAML request from controller to IDPs.
        Establish session with controller and IDP.
        Assert SAML request into the request.
        Get the controller session and IDP session.
        :param username: IDP Username
        :param password: IDP Password
        :return: controller session and IDP response
        :raises APIError: on any non-success HTTP status, or when the SAML
            handshake with the Okta IDP cannot be completed.
        """
        # Getting controller session
        controller_session = requests.Session()
        controller_session.verify = False
        saml_controller_url = self.prefix + self.SAML_URL_SUFFIX
        logger.info("Getting SAML request from url: %s", saml_controller_url)
        # The controller's /sso/login page embeds a SAMLRequest auto-post
        # form; scrape its fields with the class-level regexes.
        resp = controller_session.get(saml_controller_url,
                                      allow_redirects=True)
        if resp.status_code != 200:
            logger.error('Status Code %s msg %s' % (
                resp.status_code, resp.text))
            raise APIError('Status Code %s msg %s' % (
                resp.status_code, resp.text), resp)
        saml_request_match = re.search(OktaSAMLApiSession.saml_request_regex, resp.text,
                                       re.M | re.S)
        if not saml_request_match:
            logger.error("SAML request not generated by controller.")
            raise APIError("SAML request not generated by controller.")
        saml_request = saml_request_match.group(1)
        # NOTE(review): unlike the SAMLRequest match above, these two
        # searches call .group(1) without a None check and would raise
        # AttributeError on a non-matching page.
        relay_state = re.search(OktaSAMLApiSession.request_relay_state_regex, resp.text,
                                re.M | re.S).group(1)
        assertion_url = re.search(OktaSAMLApiSession.request_assertion_url_regex, resp.text,
                                  re.M | re.S).group(1)
        idp_session = requests.Session()
        idp_session.verify = False
        saml_data = urllib.parse.urlencode({
            'SAMLRequest': saml_request,
            'RelayState': relay_state})
        parsed_uri = urllib.parse.urlparse(assertion_url)
        base_url = "{}://{}".format(parsed_uri.scheme, parsed_uri.netloc)
        if self.idp_cookies:
            logger.info("Controller url %s generated SAML request is being "
                        "sent to IDP with existing IDP cookies.",
                        saml_controller_url)
            resp = idp_session.get(assertion_url, allow_redirects=False,
                                   cookies=self.idp_cookies)
        else:
            logger.info("Controller url %s generated SAML request is being "
                        "sent to IDP.", saml_controller_url)
            resp = idp_session.get(assertion_url, allow_redirects=False)
        if resp.status_code not in (200, 301, 302):
            logger.error('Status Code %s msg %s' % (
                resp.status_code, resp.text))
            raise APIError('Status Code %s msg %s' % (
                resp.status_code, resp.text), resp)
        # If the IDP response already carries a SAMLResponse (existing
        # session), skip the credential login below and return it as-is.
        if "SAMLResponse" not in resp.text:
            user_data = {"username": username,
                         "options": {"warnBeforePasswordExpired": True,
                                     "multiOptionalFactorEnroll": True},
                         "password": password}
            json_data = json.dumps(user_data)
            headers = {'content-type': 'application/json'}
            # Primary authentication against Okta's /api/v1/authn endpoint.
            resp = idp_session.post(base_url + "/api/v1/authn",
                                    headers=headers,
                                    data=json_data)
            if resp.status_code in [401, 403]:
                logger.error('Status Code %s msg Invalid SAML credentials %s'
                             % (resp.status_code, resp.text))
                raise APIError('Status Code %s msg Invalid SAML credentials %s'
                               % (resp.status_code, resp.text), resp)
            elif resp.status_code != 200:
                logger.error('Status Code %s msg %s' % (
                    resp.status_code, resp.text))
                raise APIError('Status Code %s msg %s' % (
                    resp.status_code, resp.text), resp)
            data = json.loads(resp.text)
            try:
                token = data["sessionToken"]
            except KeyError:
                raise APIError("Couldn't complete authentication with IDP")
            # Exchange the one-time session token for an Okta session cookie
            # and replay the original SAMLRequest via redirectUrl.
            new_url = base_url + "/login/sessionCookieRedirect"
            redirect_url = "{}?{}".format(assertion_url, saml_data)
            params = {'checkAccountSetupComplete': 'true',
                      'token': token,
                      'redirectUrl': redirect_url}
            resp = idp_session.get(new_url, params=params,
                                   allow_redirects=True)
            if resp.status_code not in (200, 301, 302):
                logger.error('Status Code %s msg %s' % (
                    resp.status_code, resp.text))
                raise APIError('Status Code %s msg %s' % (
                    resp.status_code, resp.text), resp)
        return controller_session, resp
def authenticate_session(self):
    """
    Performs SAML authentication with Avi controller and IDPs.
    Stores session cookies and sets header parameters.

    Flow: obtain the IDP's auto-submit form via ``saml_assertion``,
    extract SAMLResponse / assertion URL / RelayState from the HTML,
    and POST them back to the controller.  On success the controller
    cookies (csrftoken / sessionid) are cached in ``sessionDict``.
    On connection errors or a non-401/403 failure the method sleeps
    ``retry_wait_time`` seconds and recurses, giving up after
    ``max_session_retries`` attempts.

    Raises:
        APIError: when no password is configured, on 401/403 from the
            controller, or once the retry budget is exhausted.
    """
    username = self.avi_credentials.username
    if self.avi_credentials.password:
        password = self.avi_credentials.password
    else:
        raise APIError("No user password provided")
    logger.debug('authenticating user %s prefix %s',
                 self.avi_credentials.username, self.prefix)
    # Start from a clean cookie jar so stale controller cookies cannot
    # interfere with the fresh SAML exchange.
    self.cookies.clear()
    try:
        # Assert SAML response
        controller_session, resp = self.saml_assertion(username, password)
        content = resp.text
        # Pull the base64 SAMLResponse, the controller assertion URL and
        # the RelayState out of the IDP's auto-submit HTML form.
        saml_response_match = re.search(OktaSAMLApiSession.saml_response_regex,
                                        content, re.M | re.S)
        saml_response = saml_response_match.group(1)
        assertion_url = re.search(OktaSAMLApiSession.response_assertion_url_regex, content,
                                  re.M | re.S |
                                  re.IGNORECASE).group(1)
        relay_state = re.search(OktaSAMLApiSession.response_relay_state_regex, content,
                                re.M | re.S).group(1)
        # Python 2 only import; the form values are HTML-escaped and must
        # be unescaped before re-posting.
        from HTMLParser import HTMLParser
        parser = HTMLParser()
        assertion_url = parser.unescape(assertion_url)
        saml_response = parser.unescape(saml_response)
        saml_data = urllib.urlencode([
            ('SAMLResponse', saml_response),
            ('RelayState', relay_state)])
        headers = {'Content-Type': 'application/x-www-form-urlencoded'}
        # Hand the assertion back to the controller to establish the
        # authenticated session.
        rsp = controller_session.post(assertion_url,
                                      headers=headers,
                                      data=saml_data,
                                      allow_redirects=True)
        if rsp.status_code == 200:
            self.num_session_retries = 0
            self.remote_api_version = \
                rsp.headers.get('AVI_API_VERSION', {})
            self.headers.update(self.user_hdrs)
            if rsp.cookies and 'csrftoken' in rsp.cookies:
                # Cache the authenticated session for reuse by other
                # ApiSession instances sharing the same key.
                sessionDict[self.key] = {
                    'csrftoken': rsp.cookies['csrftoken'],
                    'session_id': rsp.cookies['sessionid'],
                    'last_used': datetime.utcnow(),
                    'api': self,
                    'connected': True
                }
            logger.debug("authentication success for user %s",
                         self.avi_credentials.username)
            return
        # Check for bad request and invalid credentials response code
        elif rsp.status_code in [401, 403]:
            logger.error('Status Code %s msg %s' % (
                rsp.status_code, rsp.text))
            err = APIError('Status Code %s msg %s' % (
                rsp.status_code, rsp.text), rsp)
            raise err
        else:
            # Other HTTP failures are not raised immediately; the error is
            # kept so the retry logic below can re-raise after exhaustion.
            logger.error("Error status code %s msg %s", rsp.status_code,
                         rsp.text)
            err = APIError('Status Code %s msg %s' % (
                rsp.status_code, rsp.text), rsp)
    except (ConnectionError, SSLError, ChunkedEncodingError) as e:
        if not self.retry_conxn_errors:
            raise
        logger.warning('Connection error retrying %s', e)
        err = e
    # Comes here only if there was either exception or login was not
    # successful
    if self.retry_wait_time:
        time.sleep(self.retry_wait_time)
    self.num_session_retries += 1
    if self.num_session_retries > self.max_session_retries:
        # Reset the counter before raising so a later login attempt
        # starts with a full retry budget.
        self.num_session_retries = 0
        logger.error("Giving up after %d retries connection failure %s" % (
            self.max_session_retries, True))
        raise err
    self.authenticate_session()
    return
| 48.635514
| 106
| 0.558493
| 2,729
| 26,020
| 5.159399
| 0.102602
| 0.060369
| 0.038778
| 0.033807
| 0.86733
| 0.848651
| 0.840128
| 0.808523
| 0.800355
| 0.788139
| 0
| 0.007659
| 0.352652
| 26,020
| 534
| 107
| 48.726592
| 0.828247
| 0.139585
| 0
| 0.750617
| 0
| 0
| 0.153708
| 0.013526
| 0
| 0
| 0
| 0
| 0.054321
| 1
| 0.014815
| false
| 0.044444
| 0.02963
| 0
| 0.103704
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
73260be8f40433b77de6241a013557b2d6d79649
| 337
|
py
|
Python
|
2018/codegate/Miro/hack.py
|
ss8651twtw/CTF
|
cf348597cb72558cc5cd8d50582860fadf014228
|
[
"MIT"
] | 12
|
2018-01-06T13:36:57.000Z
|
2021-07-19T16:47:57.000Z
|
2018/codegate/Miro/hack.py
|
ss8651twtw/Secure-Programming
|
cf348597cb72558cc5cd8d50582860fadf014228
|
[
"MIT"
] | null | null | null |
2018/codegate/Miro/hack.py
|
ss8651twtw/Secure-Programming
|
cf348597cb72558cc5cd8d50582860fadf014228
|
[
"MIT"
] | 2
|
2019-07-06T03:46:38.000Z
|
2019-10-28T15:15:36.000Z
|
#!/usr/bin/env python
# Python 2 CTF helper: factor an RSA modulus whose prime factors are
# close together, using Fermat's factorization method.
from fermat import fermat
# The challenge's public modulus N; fermat() returns its prime factors.
p, q = fermat(0x1C20BDC017E3CAA3C579B40D439E2ECD70F12C4D7F2764784C95A3FDDBA00981BA9CE5B227ADE47B0A7A0A8ACABA4541AB95C52F6B6DE3DF9EC090C6C356445B21BE437ABE10214D0B4A398A96743BBF70C864687FB2EC929F01D6EDAB2D987FE09799AD2204A2704F33061DBF9C2E03B332F0BA1A446644C864A06CD586D480B)
print p, q
| 42.125
| 274
| 0.922849
| 15
| 337
| 20.733333
| 0.733333
| 0.012862
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.482866
| 0.047478
| 337
| 7
| 275
| 48.142857
| 0.485981
| 0.059347
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.81962
| 0
| 0
| 0
| null | null | 0
| 0.333333
| null | null | 0.333333
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 9
|
733484cdfca22c03540f2693d3d1f87f9af290b4
| 4,479
|
py
|
Python
|
shinrl/solvers/discrete_vi/_target_mixin.py
|
omron-sinicx/ShinRL
|
09f4ae274a33d1fc1d9d542f816aef40014af6b5
|
[
"MIT"
] | 34
|
2021-12-09T07:12:57.000Z
|
2022-03-11T08:17:20.000Z
|
shinrl/solvers/discrete_vi/_target_mixin.py
|
omron-sinicx/ShinRL
|
09f4ae274a33d1fc1d9d542f816aef40014af6b5
|
[
"MIT"
] | null | null | null |
shinrl/solvers/discrete_vi/_target_mixin.py
|
omron-sinicx/ShinRL
|
09f4ae274a33d1fc1d9d542f816aef40014af6b5
|
[
"MIT"
] | 4
|
2021-12-11T07:48:01.000Z
|
2022-03-01T23:50:33.000Z
|
"""MixIns to compute the target value of VI-based algorithms.
Author: Toshinori Kitamura
Affiliation: NAIST & OSX
"""
from chex import Array
import shinrl as srl
class TargetMixIn:
    """Interface for computing target values of VI-based solvers.

    Subclasses provide one target computation per combination of
    (tabular / deep function approximation) x (dp / rl data source).
    """

    def target_tabular_dp(self, data: srl.DataDict) -> Array:
        # Target via full dynamic programming over a tabular Q table.
        raise NotImplementedError

    def target_tabular_rl(self, data: srl.DataDict, samples: srl.Sample) -> Array:
        # Target from sampled transitions using a tabular Q table.
        raise NotImplementedError

    def target_deep_dp(self, data: srl.DataDict) -> Array:
        # Target via full dynamic programming using a Q network.
        raise NotImplementedError

    def target_deep_rl(self, data: srl.DataDict, samples: srl.Sample) -> Array:
        # Target from sampled transitions using a Q network.
        raise NotImplementedError
class QTargetMixIn(TargetMixIn):
    """MixIn computing the vanilla (greedy) Q target."""

    def target_tabular_dp(self, data: srl.DataDict) -> Array:
        mdp = self.env.mdp
        return srl.optimal_backup_dp(
            data["Q"], mdp.rew_mat, mdp.tran_mat, self.config.discount
        )

    def target_tabular_rl(self, data: srl.DataDict, samples: srl.Sample) -> Array:
        next_q = data["Q"][samples.next_state.squeeze(axis=1)]  # BxA
        return srl.optimal_backup_rl(
            next_q, samples.rew, samples.done, self.config.discount
        )

    def target_deep_dp(self, data: srl.DataDict) -> Array:
        mdp = self.env.mdp
        targ_q = self.q_net.apply(data["QNetTargParams"], mdp.obs_mat)
        return srl.optimal_backup_dp(
            targ_q, mdp.rew_mat, mdp.tran_mat, self.config.discount
        )

    def target_deep_rl(self, data: srl.DataDict, samples: srl.Sample) -> Array:
        targ_q = self.q_net.apply(data["QNetTargParams"], samples.next_obs)
        return srl.optimal_backup_rl(
            targ_q, samples.rew, samples.done, self.config.discount
        )
class DoubleQTargetMixIn(TargetMixIn):
    """MixIn computing the Double Q target.
    Paper: https://arxiv.org/abs/1509.06461
    """

    def target_deep_dp(self, data: srl.DataDict) -> Array:
        mdp = self.env.mdp
        targ_q = self.q_net.apply(data["QNetTargParams"], mdp.obs_mat)
        online_q = self.q_net.apply(data["QNetParams"], mdp.obs_mat)
        return srl.double_backup_dp(
            targ_q, online_q, mdp.rew_mat, mdp.tran_mat, self.config.discount
        )

    def target_deep_rl(self, data: srl.DataDict, samples: srl.Sample) -> Array:
        targ_q = self.q_net.apply(data["QNetTargParams"], samples.next_obs)
        online_q = self.q_net.apply(data["QNetParams"], samples.next_obs)
        return srl.double_backup_rl(
            targ_q, online_q, samples.rew, samples.done, self.config.discount
        )
class MunchausenTargetMixIn(TargetMixIn):
    """MixIn computing the Munchausen Q target.
    Paper: https://arxiv.org/abs/2007.14430
    """

    def target_tabular_dp(self, data: srl.DataDict) -> Array:
        cfg = self.config
        mdp = self.env.mdp
        return srl.munchausen_backup_dp(
            data["Q"], mdp.rew_mat, mdp.tran_mat,
            cfg.discount, cfg.kl_coef, cfg.er_coef, cfg.logp_clip,
        )

    def target_tabular_rl(self, data: srl.DataDict, samples: srl.Sample) -> Array:
        cfg = self.config
        next_q = data["Q"][samples.next_state.squeeze(axis=1)]  # BxA
        cur_q = data["Q"][samples.state.squeeze(axis=1)]  # BxA
        return srl.munchausen_backup_rl(
            next_q, cur_q, samples.rew, samples.done, samples.act,
            cfg.discount, cfg.kl_coef, cfg.er_coef, cfg.logp_clip,
        )

    def target_deep_dp(self, data: srl.DataDict) -> Array:
        cfg = self.config
        mdp = self.env.mdp
        targ_q = self.q_net.apply(data["QNetTargParams"], mdp.obs_mat)
        return srl.munchausen_backup_dp(
            targ_q, mdp.rew_mat, mdp.tran_mat,
            cfg.discount, cfg.kl_coef, cfg.er_coef, cfg.logp_clip,
        )

    def target_deep_rl(self, data: srl.DataDict, samples: srl.Sample) -> Array:
        cfg = self.config
        next_q = self.q_net.apply(data["QNetTargParams"], samples.next_obs)  # BxA
        cur_q = self.q_net.apply(data["QNetTargParams"], samples.obs)  # BxA
        return srl.munchausen_backup_rl(
            next_q, cur_q, samples.rew, samples.done, samples.act,
            cfg.discount, cfg.kl_coef, cfg.er_coef, cfg.logp_clip,
        )
| 32.933824
| 82
| 0.602367
| 547
| 4,479
| 4.769653
| 0.149909
| 0.084324
| 0.059026
| 0.101955
| 0.898812
| 0.862016
| 0.842852
| 0.799923
| 0.799923
| 0.774243
| 0
| 0.006548
| 0.283992
| 4,479
| 135
| 83
| 33.177778
| 0.806985
| 0.073677
| 0
| 0.784314
| 0
| 0
| 0.029963
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.137255
| false
| 0
| 0.019608
| 0.098039
| 0.294118
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
73496e0ea9d41c27da277fca65101697f7c131d4
| 5,128
|
py
|
Python
|
pyuvwsim/__init__.py
|
SKA-ScienceDataProcessor/uvwsim
|
47271b43f80fb22e1baacc06555744355dca241d
|
[
"BSD-3-Clause"
] | 1
|
2016-11-22T09:20:50.000Z
|
2016-11-22T09:20:50.000Z
|
pyuvwsim/__init__.py
|
SKA-ScienceDataProcessor/uvwsim
|
47271b43f80fb22e1baacc06555744355dca241d
|
[
"BSD-3-Clause"
] | null | null | null |
pyuvwsim/__init__.py
|
SKA-ScienceDataProcessor/uvwsim
|
47271b43f80fb22e1baacc06555744355dca241d
|
[
"BSD-3-Clause"
] | 2
|
2015-12-11T07:55:07.000Z
|
2019-01-26T15:06:54.000Z
|
"""
pyuvwsim
--------
Experimental python interface to uvwsim.
"""
import _pyuvwsim
from numpy import asarray
from .version import __version__
def load_station_coords(file_name):
    """Read station coordinates from an ASCII layout file.

    The layout file must hold 2 or 3 columns of coordinates separated
    by spaces, commas, or tabs.

    Args:
        file_name (string): Path of the station coordinate file.

    Returns:
        Tuple (x, y, z) of station coordinate arrays.
    """
    return _pyuvwsim.load_station_coords(file_name)
def convert_enu_to_ecef(x, y, z, lon, lat, alt=0.0):
    """Transform ENU (East, North, Up) coordinates into ECEF.

    Args:
        x (array-like): x (East) coordinates, in metres.
        y (array-like): y (North) coordinates, in metres.
        z (array-like): z (Up) coordinates, in metres.
        lon (double): Longitude, in radians.
        lat (double): Latitude, in radians.
        alt (Optional[double]): Altitude, in metres.

    Returns:
        Tuple (x, y, z) of ECEF coordinate arrays, in metres.
    """
    x, y, z = asarray(x), asarray(y), asarray(z)
    return _pyuvwsim.convert_enu_to_ecef(x, y, z, lon, lat, alt)
def evaluate_baseline_uvw(x, y, z, ra, dec, mjd):
    """Compute baseline coordinates for a pointing direction and time.

    Args:
        x (array-like): Station x (ECEF) coordinates, in metres.
        y (array-like): Station y (ECEF) coordinates, in metres.
        z (array-like): Station z (ECEF) coordinates, in metres.
        ra (double): Right Ascension of the pointing direction, in radians.
        dec (double): Declination of the pointing direction, in radians.
        mjd (double): Modified Julian date (UTC).

    Returns:
        Tuple (uu, vv, ww) of baseline coordinate arrays, in metres.
    """
    x, y, z = asarray(x), asarray(y), asarray(z)
    return _pyuvwsim.evaluate_baseline_uvw(x, y, z, ra, dec, mjd)
def evaluate_baseline_uvw_ha_dec(x, y, z, ha, dec):
    """Compute baseline coordinates from hour angle and declination.

    Note:
        Greenwich hour angle = hour angle - east longitude.
        E.g. for the VLA (longitude -107°37'03.819" east) a source is
        overhead when its Greenwich hour angle is +107.6177275 degrees.

    Args:
        x (array-like): Station x (ECEF) coordinates, in metres.
        y (array-like): Station y (ECEF) coordinates, in metres.
        z (array-like): Station z (ECEF) coordinates, in metres.
        ha (double): Greenwich hour angle, in radians (24h == 2pi).
        dec (double): Declination of the pointing direction, in radians.

    Returns:
        Tuple (uu, vv, ww) of baseline coordinate arrays, in metres.
    """
    x, y, z = asarray(x), asarray(y), asarray(z)
    return _pyuvwsim.evaluate_baseline_uvw_ha_dec(x, y, z, ha, dec)
def evaluate_station_uvw(x, y, z, ra, dec, mjd):
    """Compute per-station uvw coordinates for a pointing and time.

    Args:
        x (array-like): Station x (ECEF) coordinates, in metres.
        y (array-like): Station y (ECEF) coordinates, in metres.
        z (array-like): Station z (ECEF) coordinates, in metres.
        ra (double): Right Ascension of the pointing direction, in radians.
        dec (double): Declination of the pointing direction, in radians.
        mjd (double): Modified Julian date (UTC).

    Returns:
        Tuple (u, v, w) of station uvw coordinate arrays, in metres.
    """
    x, y, z = asarray(x), asarray(y), asarray(z)
    return _pyuvwsim.evaluate_station_uvw(x, y, z, ra, dec, mjd)
def evaluate_station_uvw_ha_dec(x, y, z, ha, dec):
    """Compute per-station uvw coordinates from hour angle and declination.

    Note:
        Greenwich hour angle = hour angle - east longitude.
        E.g. for the VLA (longitude -107°37'03.819" east) a source is
        overhead when its Greenwich hour angle is +107.6177275 degrees.

    Args:
        x (array-like): Station x (ECEF) coordinates, in metres.
        y (array-like): Station y (ECEF) coordinates, in metres.
        z (array-like): Station z (ECEF) coordinates, in metres.
        ha (double): Greenwich hour angle (24h == 2pi), in radians.
        dec (double): Declination of the pointing direction, in radians.

    Returns:
        Tuple (u, v, w) of station uvw coordinate arrays, in metres.
    """
    x, y, z = asarray(x), asarray(y), asarray(z)
    return _pyuvwsim.evaluate_station_uvw_ha_dec(x, y, z, ha, dec)
def datetime_to_mjd(year, month, day, hour, minute, seconds):
    """Convert a calendar date and time of day to a Modified Julian date.

    Args:
        year (int): Year.
        month (int): Month.
        day (int): Day.
        hour (int): Hour.
        minute (int): Minute.
        seconds (double): Seconds.

    Returns:
        double: Modified Julian date.
    """
    return _pyuvwsim.datetime_to_mjd(year, month, day, hour, minute, seconds)
| 30.706587
| 77
| 0.635725
| 713
| 5,128
| 4.500701
| 0.171108
| 0.052353
| 0.065441
| 0.07479
| 0.777812
| 0.762231
| 0.751636
| 0.728264
| 0.725771
| 0.630103
| 0
| 0.013214
| 0.26209
| 5,128
| 166
| 78
| 30.891566
| 0.834302
| 0.681747
| 0
| 0.46875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.21875
| false
| 0
| 0.09375
| 0
| 0.53125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
b440969ede704b3661fcb87acbbd2148cfe931d2
| 187
|
py
|
Python
|
tests/test_model.py
|
mamo3gr/arcface_tensorflow
|
acc36a857e04e47dbd20f2ca29356c1b9a226b1e
|
[
"Apache-2.0"
] | null | null | null |
tests/test_model.py
|
mamo3gr/arcface_tensorflow
|
acc36a857e04e47dbd20f2ca29356c1b9a226b1e
|
[
"Apache-2.0"
] | null | null | null |
tests/test_model.py
|
mamo3gr/arcface_tensorflow
|
acc36a857e04e47dbd20f2ca29356c1b9a226b1e
|
[
"Apache-2.0"
] | null | null | null |
from model import create_model

# Disabled smoke test kept for reference: builds the model and prints its
# architecture.  Re-enable once a test backend is available.
# def test_create_model():
#     input_shape = (224, 224, 3)
#     n_classes = 10
#     model = create_model(input_shape, n_classes)
#     model.summary()
| 23.375
| 50
| 0.668449
| 26
| 187
| 4.5
| 0.538462
| 0.282051
| 0.273504
| 0.358974
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.061644
| 0.219251
| 187
| 7
| 51
| 26.714286
| 0.739726
| 0.770053
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
c33ced99b4774a15ee6ad5f2c06f33fa9433d6ea
| 174,001
|
py
|
Python
|
preprocessing.py
|
KrisG04/encoder-decoder-text-summarizer
|
db2680a89bc4d6a0651cf734445a376da73d4ffb
|
[
"MIT"
] | null | null | null |
preprocessing.py
|
KrisG04/encoder-decoder-text-summarizer
|
db2680a89bc4d6a0651cf734445a376da73d4ffb
|
[
"MIT"
] | null | null | null |
preprocessing.py
|
KrisG04/encoder-decoder-text-summarizer
|
db2680a89bc4d6a0651cf734445a376da73d4ffb
|
[
"MIT"
] | null | null | null |
import collections
import pickle
import nltk
import re
from pycontractions import Contractions
import time
import datetime
import numpy as np
import matplotlib.pyplot as plt
from sklearn.utils import shuffle
import gensim
import utils
import os
from nltk.corpus import wordnet
class DataPreprocessing:
    def count_words_of_embedding_file(self, word2int_dict_pickle_file_path, word2vec_keyedvector_file_path):
        """
        Report how many vocabulary words exist in a word2vec embedding.

        Loads a pickled ``word -> int`` dict and a gensim KeyedVectors
        model, then prints the frequency-sorted list of vocabulary words
        absent from the embedding plus overall coverage percentages.
        Diagnostic only; returns None.
        """
        word2int_dict = self.read_pickle_file(word2int_dict_pickle_file_path)
        w2v = gensim.models.KeyedVectors.load_word2vec_format(word2vec_keyedvector_file_path)
        count_existing_words = 0
        count_overall_words = 0
        count_absent_words = 0
        absent_words_dict = dict()
        for word, _ in word2int_dict.items():
            count_overall_words += 1
            if count_overall_words % 5000 == 0:
                # Progress heartbeat every 5000 vocabulary entries.
                print('count overall, existing & absent words {}, {} & {}'.format(count_overall_words,
                                                                                  count_existing_words,
                                                                                  count_absent_words))
            try:
                # Only the part before an underscore is looked up --
                # presumably keys may carry a suffix; TODO confirm.
                word = word.split(sep='_')[0]
                _ = w2v.word_vec(word)
                count_existing_words += 1
            except KeyError:
                # Word missing from the embedding: track its frequency.
                count_absent_words += 1
                if absent_words_dict.get(word, None):
                    freq = absent_words_dict[word]
                    absent_words_dict[word] = freq + 1
                else:
                    absent_words_dict[word] = 1
        absent_words_list = []
        for k, v in absent_words_dict.items():
            absent_words_list.append((k, v))
        # Most frequent absent words first.
        absent_words_list = sorted(absent_words_list, key=lambda tup: -tup[1])
        for (word, freq) in absent_words_list:
            print('{} {}'.format(word, freq))
        print('\ncount_existing_words {} {:.3f}%'.format(count_existing_words,
                                                         count_existing_words * 100.0 / count_overall_words))
        print(
            'count_absent_words {} {:.3f}'.format(count_absent_words, count_absent_words * 100.0 / count_overall_words))
        print('count_words {} {:.3f}%'.format(count_overall_words, count_overall_words * 100.0 / count_overall_words))
        return
@staticmethod
def read_pickle_file(path_to_pickle_file):
with open(path_to_pickle_file, "rb") as f:
b = pickle.load(f)
return b
def convert_binary_file_to_txt_file(self, binary_file_path, txt_file_path, read_per_megabytes=8):
txt_file = open(txt_file_path, 'w', encoding='utf8')
with open(binary_file_path, "rb") as f:
read_bytes = read_per_megabytes * 1048576
count_MB = read_per_megabytes
bytes = f.read(read_bytes)
while bytes:
txt_file.write(" ".join(map(str, bytes)))
bytes = f.read(read_bytes)
print('MB: {}'.format(count_MB))
count_MB += read_per_megabytes
txt_file.write('\n')
print('Binary file is converted.')
return txt_file_path
# it writes a binary and an txt file with random numbers of a range of given numbers and
# return the list of random numbers
@staticmethod
def generate_a_range_of_random_numbers(test_path_to_pickle_file, test_path_to_txt_file,
val_path_to_pickle_file, val_path_to_txt_file,
test_num_of_samples, val_num_of_samples,
start=1, end=144986):
numbers = []
for i in range(start, end + 1):
numbers.append(i)
selected_random_numbers = shuffle(numbers, random_state=4453664,
n_samples=test_num_of_samples + val_num_of_samples)
test_random_numbers = []
val_random_numbers = []
for i in range(0, test_num_of_samples):
test_random_numbers.append(selected_random_numbers[i])
for i in range(test_num_of_samples, test_num_of_samples + val_num_of_samples):
val_random_numbers.append(selected_random_numbers[i])
with open(test_path_to_pickle_file, 'wb') as f:
pickle.dump(test_random_numbers, f)
with open(val_path_to_pickle_file, 'wb') as f:
pickle.dump(val_random_numbers, f)
with open(test_path_to_txt_file, 'w', encoding='utf8') as f:
for i in test_random_numbers:
f.write('{}\n'.format(i))
with open(val_path_to_txt_file, 'w', encoding='utf8') as f:
for i in val_random_numbers:
f.write('{}\n'.format(i))
print('rows: {}, total samples: {}, test samples: {}, val samples: {}'.format(len(numbers),
len(selected_random_numbers),
len(test_random_numbers),
len(val_random_numbers)))
def create_testing_and_validation_initial_subset(self, test_initial_article_path,
test_initial_summary_path,
test_subset_initial_article_file_path,
test_subset_initial_summary_file_path,
validation_initial_article_file_path,
validation_initial_summary_file_path,
test_random_lines_file_path,
validation_random_lines_file_path):
print('creating_testing_and_validation_initial_subset...')
article_read_f = open(test_initial_article_path, 'r', encoding='utf8')
summary_read_f = open(test_initial_summary_path, 'r', encoding='utf8')
pairs = []
for article_line, summary_line in zip(article_read_f, summary_read_f):
pairs.append((article_line, summary_line))
test_lines_list = []
val_lines_list = []
with open(test_random_lines_file_path, "rb") as f:
test_lines_list = pickle.load(f)
with open(validation_random_lines_file_path, "rb") as f:
val_lines_list = pickle.load(f)
with open(test_subset_initial_article_file_path, 'w', encoding='utf8') as a:
with open(test_subset_initial_summary_file_path, 'w', encoding='utf8') as s:
for i in test_lines_list:
p = pairs[i]
a.write(p[0])
s.write(p[1])
with open(validation_initial_article_file_path, 'w', encoding='utf8') as a:
with open(validation_initial_summary_file_path, 'w', encoding='utf8') as s:
for i in val_lines_list:
p = pairs[i]
a.write(p[0])
s.write(p[1])
def view_line_with_phrase(self, file_path, phrase, without_list, num_of_lines=555444333):
with open(file_path, 'r', encoding='utf8') as f:
count_lines = 0
count_lines_with_phrase = 0
token_dict = dict()
for line in f:
count_lines += 1
index = line.find(phrase)
without_flag = True
for s in without_list:
if line.find(s) > -1:
without_flag = False
break
if index > -1 and without_flag:
count_lines_with_phrase += 1
print("{} {}".format(count_lines, line))
if count_lines > num_of_lines:
print("Break at line: {}".format(count_lines))
break
print('Cases: {}'.format(count_lines_with_phrase))
print('Lines: {}'.format(count_lines))
print('Lines with phrase ({}): {} ({}%)'.format(phrase,
count_lines_with_phrase,
round(count_lines_with_phrase * 100 / count_lines, 2)))
    def view_n_gram_with_symbol_and_phrase(self, file_path, n_gram, symbol, phrase, without_list, freq_greater_than):
        """
        Print frequency-ranked n-grams centred on tokens containing *symbol*.

        For lines that contain *phrase* and none of the strings in
        *without_list*, each whitespace token containing *symbol* is
        expanded into an n-gram built from up to one preceding and up to
        three following tokens (how many is controlled by *n_gram*).
        N-grams with frequency above *freq_greater_than* are printed along
        with summary statistics.  Diagnostic only; returns None.
        """
        with open(file_path, 'r', encoding='utf8') as f:
            count_lines = 0
            count_lines_with_symbol = 0
            token_dict = dict()
            for line in f:
                count_lines += 1
                index = line.find(phrase)
                if index > -1:
                    # Skip lines containing any excluded substring.
                    without_flag = True
                    for s in without_list:
                        if line.find(s) > -1:
                            without_flag = False
                            break
                    if without_flag:
                        # print(line)
                        line_split_list = line.split()
                        line_length = len(line_split_list)
                        word_index = -1
                        for w in line_split_list:
                            word_index += 1
                            if w.find(symbol) > -1:
                                pre_w = ''
                                post_w = ''
                                post_post_w = ''
                                post_post_post_w = ''
                                # Gather context tokens; n_gram caps how
                                # many neighbours are included.
                                if word_index > 0 and n_gram > 1:
                                    pre_w = line_split_list[word_index - 1]
                                if word_index + 1 < line_length and n_gram > 2:
                                    post_w = line_split_list[word_index + 1]
                                if word_index + 2 < line_length and n_gram > 3:
                                    post_post_w = line_split_list[word_index + 2]
                                if word_index + 3 < line_length and n_gram > 4:
                                    post_post_post_w = line_split_list[word_index + 3]
                                # NOTE(review): no space before the final
                                # context token -- looks unintentional but
                                # is preserved as-is.
                                n_gram_phrase = pre_w + ' ' + w + ' ' + post_w + ' ' + post_post_w + post_post_post_w
                                if token_dict.get(n_gram_phrase, None):
                                    freq = token_dict[n_gram_phrase]
                                    token_dict[n_gram_phrase] = freq + 1
                                    count_lines_with_symbol += 1
                                else:
                                    token_dict[n_gram_phrase] = 1
                                    count_lines_with_symbol += 1
        token_list = []
        for key, value in token_dict.items():
            temp = (key, value)
            token_list.append(temp)
        # Highest frequency first.
        symbol_list = sorted(token_list, key=lambda tup: -tup[1])
        print('\n\n')
        for i in symbol_list:
            if i[1] > freq_greater_than:
                print('{} {}'.format(i[0], i[1]))
        print('\nDistinct cases: {}'.format(len(symbol_list)))
        print('Lines: {}'.format(count_lines))
        print('Lines with {}-gram ({}): {} ({}%)'.format(n_gram, symbol, count_lines_with_symbol,
                                                         round(count_lines_with_symbol * 100 / count_lines, 2)))
    def vocab_count_words_and_statistics(self, article_path, model_summary_path, reports_dir,
                                         report_id='report', debug=False):
        """
        Build vocabularies and length statistics for article/summary files.

        Reads the aligned article and summary files line by line, counts
        token frequencies and sequence lengths, then writes one vocabulary
        file per side, a text report with coverage/length statistics, and
        a PDF with length histograms.  Finally prints the report content.

        Args:
            article_path: Text file with one article per line.
            model_summary_path: Aligned text file with one summary per line.
            reports_dir: Output directory prefix (concatenated directly,
                so it should end with a path separator).
            report_id: Prefix used in all output file names.
            debug: When True, stop after ~100000 lines and print the
                (currently disabled) phase timings.
        """
        start_time = time.time()
        article_tokens_dict = {}
        summary_tokens_dict = {}
        count_article_tokens = 0
        count_summary_tokens = 0
        count_article_distinct_tokens = 0
        count_summary_distinct_tokens = 0
        distinct_tokens_set = set()
        article_length_list = []
        summary_length_list = []
        line_counter = 0
        print("Reading files:\n {}\n {}".format(article_path, model_summary_path))
        # NOTE(review): these handles are closed explicitly below, but a
        # with-statement would be safer on error paths.
        article_read_f = open(article_path, 'r', encoding='utf8')
        summary_read_f = open(model_summary_path, 'r', encoding='utf8')
        # t1..t6 back the commented-out per-phase profiling code.
        t1 = t2 = t3 = t4 = t5 = t6 = 0
        for article_line, summary_line in zip(article_read_f, summary_read_f):
            line_counter += 1
            # t = time.time()
            article_line_tokens = article_line.split()  # nltk.tokenize.word_tokenize(article_line) #
            # t1 += (time.time() - t)
            # t = time.time()
            summary_line_tokens = summary_line.split()  # nltk.tokenize.word_tokenize(summary_line) #
            # t2 += (time.time() - t)
            # t = time.time()
            article_length_list.append(len(article_line_tokens))
            # t3 += (time.time() - t)
            # t = time.time()
            summary_length_list.append(len(summary_line_tokens))
            # t4 += (time.time() - t)
            # t = time.time()
            # Update article token frequencies and distinct counts.
            for toc in article_line_tokens:
                if toc not in article_tokens_dict.keys():
                    article_tokens_dict[toc] = 1
                    count_article_tokens += 1
                    count_article_distinct_tokens += 1
                    distinct_tokens_set.add(toc)
                else:
                    freq = article_tokens_dict[toc]
                    article_tokens_dict[toc] = freq + 1
                    count_article_tokens += 1
            # if article_tokens_dict.get(toc, None):
            #     freq = article_tokens_dict[toc]
            #     article_tokens_dict[toc] = freq + 1
            #     count_article_tokens += 1
            # else:
            #     article_tokens_dict[toc] = 1
            #     count_article_tokens += 1
            #     count_article_distinct_tokens += 1
            # Update summary token frequencies and distinct counts.
            for toc in summary_line_tokens:
                if toc not in summary_tokens_dict.keys():
                    summary_tokens_dict[toc] = 1
                    count_summary_tokens += 1
                    count_summary_distinct_tokens += 1
                    distinct_tokens_set.add(toc)
                else:
                    freq = summary_tokens_dict[toc]
                    summary_tokens_dict[toc] = freq + 1
                    count_summary_tokens += 1
            # if summary_tokens_dict.get(toc, None):
            #     freq = summary_tokens_dict[toc]
            #     summary_tokens_dict[toc] = freq + 1
            #     count_summary_tokens += 1
            # else:
            #     summary_tokens_dict[toc] = 1
            #     count_summary_tokens += 1
            #     count_summary_distinct_tokens += 1
            if line_counter % 100000 == 0:
                # Progress heartbeat with elapsed wall-clock time.
                print('{} lines, Time: {}'.format(line_counter,
                                                  datetime.timedelta(seconds=round(time.time() - start_time, 1))))
            if debug:
                debug_lines = 100000
                if line_counter > debug_lines:
                    print('t1 article_line_tokens {}\n'
                          't2 summary_line_tokens {}\n'
                          't3 article_length_list {}\n'
                          't4 summary_length_list {}\n'
                          't5 article_tokens_dict {}\n'
                          't6 summary_tokens_dict {}\n'.format(datetime.timedelta(seconds=t1),
                                                               datetime.timedelta(seconds=t2),
                                                               datetime.timedelta(seconds=t3),
                                                               datetime.timedelta(seconds=t4),
                                                               datetime.timedelta(seconds=t5),
                                                               datetime.timedelta(seconds=t6)))
                    break
        print("tokens for articles and summaries file have been added to dictionaries")
        article_read_f.close()
        summary_read_f.close()
        article_tokens_list = []
        summary_tokens_list = []
        for k, v in article_tokens_dict.items():
            article_tokens_list.append((k, v))
        for k, v in summary_tokens_dict.items():
            summary_tokens_list.append((k, v))
        # Sort both vocabularies by descending frequency.
        article_tokens_list = sorted(article_tokens_list, key=lambda tup: -tup[1])
        summary_tokens_list = sorted(summary_tokens_list, key=lambda tup: -tup[1])
        print('Tokens have been added to sorted lists')
        article_vocabulary_path = '{}{}_vocab_article.txt'.format(reports_dir, report_id)
        summary_vocabulary_path = '{}{}_vocab_summary.txt'.format(reports_dir, report_id)
        report_file_path = '{}{}_report.txt'.format(reports_dir, report_id)
        chart_file_path = '{}{}_chart.pdf'.format(reports_dir, report_id)
        print('Writing files:\n {}\n {}\n {}\n {}'.format(article_vocabulary_path,
                                                          summary_vocabulary_path,
                                                          report_file_path,
                                                          chart_file_path))
        # Length statistics for both sides of the corpus.
        article_max_len = np.max(np.array(article_length_list))
        article_min_len = np.min(np.array(article_length_list))
        article_avg_len = np.mean(np.array(article_length_list))
        article_var_len = np.var(np.array(article_length_list))
        article_std_len = np.std(np.array(article_length_list))
        summary_max_len = np.max(np.array(summary_length_list))
        summary_min_len = np.min(np.array(summary_length_list))
        summary_avg_len = np.mean(np.array(summary_length_list))
        summary_var_len = np.var(np.array(summary_length_list))
        summary_std_len = np.std(np.array(summary_length_list))
        count_distinct_tokens = len(distinct_tokens_set)
        # Free the (potentially large) set before writing outputs.
        del distinct_tokens_set
        with open(article_vocabulary_path, 'w+', encoding='utf8') as f:
            for a in article_tokens_list:
                f.write('{} {}\n'.format(a[0], a[1]))
        with open(summary_vocabulary_path, 'w+', encoding='utf8') as f:
            for a in summary_tokens_list:
                f.write('{} {}\n'.format(a[0], a[1]))
        print('Vocabulary files for both articles and summaries have been written')
        with open(report_file_path, 'w+', encoding='utf8') as f:
            comment = 'reports for {} data files'.format(report_id)
            f.write("Reports of dataset files.\nComment: {}\n\n".format(comment))
            f.write("Reading files:\n {}\n {}\n\n".format(article_path, model_summary_path))
            f.write('Writing files:\n {}\n {}\n {}\n {}\n\n'.format(article_vocabulary_path,
                                                                    summary_vocabulary_path,
                                                                    report_file_path,
                                                                    chart_file_path))
            f.write('Vocabulary size: {}\n\n'.format(count_distinct_tokens))
            f.write('Articles:'
                    '\n number of instances {}'
                    '\n tokens: {}'
                    '\n distinct tokens: {} {:.4f}%'
                    '\n min, max & avg length: {}, {}, {:.3f}'
                    '\n var & std of length:'
                    ' {:.3f} & {:.3f}\n\n'.format(line_counter,
                                                  count_article_tokens,
                                                  count_article_distinct_tokens,
                                                  100 * count_article_distinct_tokens / count_article_tokens,
                                                  article_min_len, article_max_len, article_avg_len,
                                                  article_var_len, article_std_len
                                                  ))
            f.write('Summaries:'
                    '\n number of instances {}'
                    '\n tokens: {}'
                    '\n distinct tokens: {} {:.4f}%'
                    '\n min, max & avg length: {}, {} & {:.3f}'
                    '\n var & std of length:'
                    ' {:.3f} & {:.3f}\n\n'.format(line_counter,
                                                  count_summary_tokens,
                                                  count_summary_distinct_tokens,
                                                  100 * count_summary_distinct_tokens / count_summary_tokens,
                                                  summary_min_len, summary_max_len, summary_avg_len,
                                                  summary_var_len, summary_std_len
                                                  ))
            # f.write('article_length_list = {}\n\n'.format(article_length_list))
            # f.write('summary_length_list = {}\n\n'.format(summary_length_list))
        print('Report file has been written.')
        # x = np.random.normal(size=1000)
        plt.figure(1)
        plt.subplot(211)
        plt.hist(article_length_list, histtype='barstacked', density=True, bins=article_max_len)  # normed=True,
        plt.ylabel('Probability')
        # NOTE(review): this second ylabel call overwrites the first;
        # 'Article length' was probably meant as the xlabel.
        plt.ylabel('Article length')
        # plt.savefig(reports_dir + reports_filename + '_chart_artcicle_len_distr.pdf')
        plt.subplot(212)
        plt.hist(summary_length_list, histtype='stepfilled', density=True, bins=article_max_len)
        plt.ylabel('Probability')
        plt.ylabel('Summary length')
        plt.tick_params()
        plt.tight_layout()
        plt.savefig(chart_file_path)
        plt.close()
        print('charts have been plotted.')
        print('Process finished.')
        # Echo the written report to stdout for convenience.
        with open(report_file_path, 'r', encoding='utf8') as f:
            content = f.read()
        print(content)
# remove duplicates, instances with longer summaries than articles and instances with <unk>
# from the original data to the initial data
def clean_duc_dataset_from_original_to_cleaned(self, article_path,
                                               model_summary1_path, model_summary2_path,
                                               model_summary3_path, model_summary4_path,
                                               new_article_path,
                                               new_model_summary1_path, new_model_summary2_path,
                                               new_model_summary3_path, new_model_summary4_path,
                                               word2vec_path,
                                               print_per_lines=50):
    """Clean the original DUC dataset (one article file plus four model-summary files).

    Every input line is lowercased and normalised through ``self.clean_text``
    (contraction expansion is driven by the word2vec model at *word2vec_path*),
    and the cleaned line is written to the matching output file.

    :param article_path: input article file, one article per line
    :param model_summary1_path: input model-summary files (2..4 analogous), line-aligned with the articles
    :param new_article_path: output path for the cleaned articles
    :param new_model_summary1_path: output paths for the cleaned summaries (2..4 analogous)
    :param word2vec_path: path of the word2vec model loaded by Contractions
    :param print_per_lines: progress-report interval in lines
    """
    print('Cleaning... from original to cleaned DUC data...')
    start_time = time.time()
    cont = Contractions(word2vec_path)
    count_input_lines = 0
    count_output_lines = 0
    # 'with' guarantees all ten handles are closed even if cleaning raises
    # (the original leaked the write handles on any exception).
    with open(article_path, 'r', encoding='utf8') as article_read_f, \
            open(model_summary1_path, 'r', encoding='utf8') as summary1_read_f, \
            open(model_summary2_path, 'r', encoding='utf8') as summary2_read_f, \
            open(model_summary3_path, 'r', encoding='utf8') as summary3_read_f, \
            open(model_summary4_path, 'r', encoding='utf8') as summary4_read_f, \
            open(new_article_path, 'w+', encoding='utf8') as article_write_f, \
            open(new_model_summary1_path, 'w+', encoding='utf8') as summary1_write_f, \
            open(new_model_summary2_path, 'w+', encoding='utf8') as summary2_write_f, \
            open(new_model_summary3_path, 'w+', encoding='utf8') as summary3_write_f, \
            open(new_model_summary4_path, 'w+', encoding='utf8') as summary4_write_f:
        # zip stops at the shortest file, keeping the five outputs line-aligned.
        for article_line, summary1_line, summary2_line, summary3_line, summary4_line in zip(
                article_read_f, summary1_read_f, summary2_read_f, summary3_read_f, summary4_read_f):
            count_input_lines += 1
            article_line_new = self.clean_text(article_line.lower(), cont)
            summary1_line_new = self.clean_text(summary1_line.lower(), cont)
            summary2_line_new = self.clean_text(summary2_line.lower(), cont)
            summary3_line_new = self.clean_text(summary3_line.lower(), cont)
            summary4_line_new = self.clean_text(summary4_line.lower(), cont)
            article_write_f.write(article_line_new + '\n')
            summary1_write_f.write(summary1_line_new + '\n')
            summary2_write_f.write(summary2_line_new + '\n')
            summary3_write_f.write(summary3_line_new + '\n')
            summary4_write_f.write(summary4_line_new + '\n')
            # Every input line yields exactly one output line; the original
            # never incremented this counter, so progress always reported 0.
            count_output_lines += 1
            if count_input_lines % print_per_lines == 0:
                print('{}, {}, {} (input line, output line and time for cleaning)'.format(
                    count_input_lines,
                    count_output_lines,
                    datetime.timedelta(seconds=round(time.time() - start_time, 0))))
                print(' art__in: {}'.format(article_line), end='')
                print(' art_out: {}'.format(article_line_new), end='\n')
                print(' sum1__in: {}'.format(summary1_line), end='')
                print(' sum1_out: {}'.format(summary1_line_new), end='\n')
                print(' sum2__in: {}'.format(summary2_line), end='')
                print(' sum2_out: {}'.format(summary2_line_new), end='\n')
                print(' sum3__in: {}'.format(summary3_line), end='')
                print(' sum3_out: {}'.format(summary3_line_new), end='\n')
                print(' sum4__in: {}'.format(summary4_line), end='')
                print(' sum4_out: {}'.format(summary4_line_new), end='\n')
# remove duplicates and clean dataset
def clean_dataset(self, article_path, model_summary_path, new_article_path, new_model_summary_path,
                  word2vec_path, print_per_lines=10000, debug=False):
    """Deduplicate and clean a line-aligned article/summary corpus.

    Pass 1 collects distinct (article, summary) line pairs into a set.
    Pass 2 drops pairs whose article is not longer (in characters) than its
    summary, cleans both sides with ``self.clean_text`` and writes them out.

    Note: iterating the set makes the output order nondeterministic.

    :param word2vec_path: path of the word2vec model loaded by Contractions
    :param print_per_lines: progress-report interval in lines
    :param debug: stop each pass after 1000 lines (for quick testing)
    """
    print('Cleaning dataset...')
    start_time = time.time()
    # 'with' closes all four handles even on exception (the original leaked
    # the two write handles if cleaning raised).
    with open(article_path, 'r', encoding='utf8') as article_read_f, \
            open(model_summary_path, 'r', encoding='utf8') as summary_read_f, \
            open(new_article_path, 'w+', encoding='utf8') as article_write_f, \
            open(new_model_summary_path, 'w+', encoding='utf8') as summary_write_f:
        cont = Contractions(word2vec_path)
        print('word2vec vectors have been loaded')
        count_input_lines = 0
        article_summary_set = set()
        # Pass 1: read both files in lockstep and deduplicate exact pairs.
        for article_line, summary_line in zip(article_read_f, summary_read_f):
            article_summary_set.add((article_line, summary_line))
            count_input_lines += 1
            if count_input_lines % print_per_lines == 0:
                print('{}, {} (input line and time for removing duplicates)'.format(
                    count_input_lines,
                    datetime.timedelta(seconds=round(time.time() - start_time, 0))))
            if debug and count_input_lines == 1000:
                break
        count_input_lines = 0
        count_output_lines = 0
        count_shorter_articles_than_summary = 0
        # Pass 2: keep only pairs with article longer than summary, clean both.
        for article_line, summary_line in article_summary_set:
            count_input_lines += 1
            if len(article_line) > len(summary_line):
                count_output_lines += 1
                article_line_new = self.clean_text(article_line, cont)
                summary_line_new = self.clean_text(summary_line, cont)
                article_write_f.write(article_line_new + '\n')
                summary_write_f.write(summary_line_new + '\n')
                if count_input_lines % print_per_lines == 0:
                    print('{}, {}, {} (input line, output line and time for cleaning)'.format(
                        count_input_lines,
                        count_output_lines,
                        datetime.timedelta(seconds=round(time.time() - start_time, 0))))
                    print(' art__in: {}'.format(article_line), end='')
                    print(' art_out: {}'.format(article_line_new), end='\n')
                    print(' sum__in: {}'.format(summary_line), end='')
                    print(' sum_out: {}'.format(summary_line_new), end='\n')
            else:
                count_shorter_articles_than_summary += 1
            if debug and count_input_lines == 1000:
                break
        print('\ncount_shorter_articles_than_summary: {}'.format(count_shorter_articles_than_summary))
# normalise a single line of text (brackets, punctuation, digits, contractions)
@staticmethod
def clean_text(text, contractions):
    """Normalise one (lowercased) corpus line.

    Steps: strip PTB bracket markers, map punctuation to spaces (apostrophes
    kept), mask digits with '#', normalise tokenised negations such as
    "ca n't", expand the remaining contractions via *contractions* (a
    pycontractions-style object exposing ``expand_texts``), and re-space
    clitic apostrophes so e.g. "mary's" becomes "mary 's".

    :param text: the raw input line
    :param contractions: object with expand_texts(texts, precise=..., scores=...)
    :return: the cleaned, single-spaced, stripped line
    """
    # Drop '-lrb- ... -rrb-' parentheticals, leftover bracket markers and all
    # periods.  Raw strings fix the invalid '\]' escape the original
    # non-raw literals triggered (DeprecationWarning); values are unchanged.
    text = re.sub(r"-lrb-(.*?)-rrb-", "", text)
    text = text.replace('-lrb-', '').replace('-rrb-', '').replace('.', '')
    # One-pass punctuation-to-space mapping; the apostrophe is deliberately
    # excluded so contractions survive for the steps below.
    table = str.maketrans({key: ' ' for key in r'!"$%&()*+,-./:;<=>?@[\]^_`{|}~'})
    text = text.translate(table)
    text = re.sub(r"[0-9]", "#", text)
    text = re.sub(r' +', ' ', text)
    # Normalise the tokenised negations/contractions before expansion.
    text = text.replace("ca n't", "can not"). \
        replace("wo n't", "will not"). \
        replace("n't", "not"). \
        replace("''", " "). \
        replace(" 'm ", " am "). \
        replace(" 've ", " have "). \
        replace(" 're ", " are ")
    text = re.sub(r' +', ' ', text)
    # 'were' is masked before expansion and restored afterwards (presumably
    # to protect it from the expander).
    text = text.replace(" '", "'"). \
        replace('were', 'WWEERREE')
    text = list(contractions.expand_texts([text], precise=True, scores=False))[0]
    # Restore 'were' and re-space the remaining clitic apostrophes.
    text = text.replace('WWEERREE', 'were'). \
        replace("'s ", " 's "). \
        replace("' s ", " 's "). \
        replace(" and'#", " and '#"). \
        replace(" the'#", " the '#"). \
        replace("'#s ", " '#s "). \
        replace("' ", " ' "). \
        replace("\r", "").replace("\n", ""). \
        replace("'#", " '#")
    text = text.replace("'s", " 's").replace("'#", " '#")
    text = text.replace("'n", " 'n"). \
        replace("'em", " 'em").replace("'d", " 'd").replace("'m", " 'm").replace("\n", "")
    text = re.sub(r' +', ' ', text)
    text = text.strip()
    return text
def remove_duplicates(self, article_path, model_summary_path, new_article_path, new_model_summary_path,
                      print_per_lines=10000, debug=False):
    """Drop duplicate (article, summary) line pairs from a parallel corpus.

    The two files are read in lockstep; each distinct pair is kept exactly
    once and the survivors are written back out unchanged.  Because pairs
    are buffered in a set, the output order is nondeterministic.

    :param print_per_lines: progress-report interval in lines
    :param debug: stop reading after 1000 lines (for quick testing)
    """
    print('remove duplicates...')
    start_time = time.time()
    article_summary_set = set()
    count_input_lines = 0
    # 'with' closes all four handles even on exception (the original leaked
    # them if anything raised mid-loop).
    with open(article_path, 'r', encoding='utf8') as article_read_f, \
            open(model_summary_path, 'r', encoding='utf8') as summary_read_f, \
            open(new_article_path, 'w+', encoding='utf8') as article_write_f, \
            open(new_model_summary_path, 'w+', encoding='utf8') as summary_write_f:
        for article_line, summary_line in zip(article_read_f, summary_read_f):
            article_summary_set.add((article_line, summary_line))
            count_input_lines += 1
            if count_input_lines % print_per_lines == 0:
                print('{}, {} (input line and time)'.format(
                    count_input_lines,
                    datetime.timedelta(seconds=round(time.time() - start_time, 0))))
            # Early stopping for testing
            if debug and count_input_lines == 1000:
                break
        # Lines still carry their original trailing newline, so they are
        # written back verbatim.
        for article_line, summary_line in article_summary_set:
            article_write_f.write(article_line)
            summary_write_f.write(summary_line)
@staticmethod
def clean_es_text(text):
    """Re-space clitic apostrophes (e.g. "mary's" -> "mary 's") and tidy spacing."""
    # The substitutions run strictly in this order, exactly mirroring the
    # original chained str.replace calls.
    for target, substitute in (("'s", " 's"), ("'#", " '#"), ("'n", " 'n"),
                               ("'em", " 'em"), ("'d", " 'd"), ("'m", " 'm"),
                               ("\n", "")):
        text = text.replace(target, substitute)
    # Collapse any runs of spaces introduced above and trim both ends.
    text = re.sub(' +', ' ', text)
    return text.strip()
# clean words such as mary's, which will become mary 's
def clean_es_dataset(self, article_path, model_summary_path, new_article_path, new_model_summary_path,
                     print_per_lines=10000, debug=False):
    """Run ``clean_es_text`` over every line of a parallel article/summary corpus.

    Reads the two input files in lockstep, cleans each line and writes the
    result (plus a newline) to the corresponding output file.

    :param print_per_lines: progress-report interval in lines
    :param debug: stop after 1000 lines (for quick testing)
    """
    print('Cleaning dataset...')
    start_time = time.time()
    count_input_lines = 0
    # 'with' guarantees closure of all four handles; the original never
    # closed the two read handles at all.
    with open(article_path, 'r', encoding='utf8') as article_read_f, \
            open(model_summary_path, 'r', encoding='utf8') as summary_read_f, \
            open(new_article_path, 'w+', encoding='utf8') as article_write_f, \
            open(new_model_summary_path, 'w+', encoding='utf8') as summary_write_f:
        for article_line, summary_line in zip(article_read_f, summary_read_f):
            count_input_lines += 1
            article_line_new = self.clean_es_text(article_line)
            summary_line_new = self.clean_es_text(summary_line)
            article_write_f.write(article_line_new + '\n')
            summary_write_f.write(summary_line_new + '\n')
            if count_input_lines % print_per_lines == 0:
                print('{}, {} (line and time for cleaning)'.format(
                    count_input_lines,
                    datetime.timedelta(seconds=round(time.time() - start_time, 0))))
                print(' art__in: {}'.format(article_line), end='')
                print(' art_out: {}'.format(article_line_new), end='\n')
                print(' sum__in: {}'.format(summary_line), end='')
                print(' sum_out: {}'.format(summary_line_new), end='\n')
            if debug and count_input_lines == 1000:
                break
a = ""
def noun_word_freq_hypernympaths(self, input_word_pos_freq_pickle_file_path,
                                 input_word_freq_hypernyms_pickle_file_path,
                                 output_noun_freq_pickle_file_path,
                                 output_noun_freq_txt_file_path):
    """Build a noun -> (freq, normalised log-freq, hypernym-depth path) mapping.

    Nouns (pos == 'n') are taken from the (word, pos) -> freq pickle; each is
    paired with its hypernym path from the word -> (_, _, hypernym_depth_list)
    pickle (an empty list when the word is missing there).  The result is
    dumped as a pickle and also written as a sorted plain-text report.
    """
    input_word_pos_freq_dict = utils.read_pickle_file(input_word_pos_freq_pickle_file_path)
    input_word_freq_hypernyms_dict = utils.read_pickle_file(input_word_freq_hypernyms_pickle_file_path)
    output_noun_freq_hypernyms_dict = dict()
    # Highest noun frequency, used to normalise log10-frequencies.
    max_freq = max((freq for (word, pos), freq in input_word_pos_freq_dict.items() if pos == 'n'),
                   default=0)
    log_max_freq = np.log10(max_freq + 1)
    for (word, pos), freq in input_word_pos_freq_dict.items():
        if pos != 'n':
            continue
        if input_word_freq_hypernyms_dict.get(word, None):
            _, _, hypernym_depth_list = input_word_freq_hypernyms_dict[word]
        else:
            hypernym_depth_list = []
        output_noun_freq_hypernyms_dict[word] = (freq, np.log10(freq + 1) / log_max_freq,
                                                 hypernym_depth_list)
    output_noun_freq_hypernyms_list = [(word, freq, norm_freq, hypernyms)
                                       for word, (freq, norm_freq, hypernyms)
                                       in output_noun_freq_hypernyms_dict.items()]
    # NOTE(review): utils.sort_by_second presumably orders by frequency — confirm.
    output_noun_freq_hypernyms_list = utils.sort_by_second(output_noun_freq_hypernyms_list)
    with open(output_noun_freq_txt_file_path, 'w', encoding='utf8') as f:
        for (word, freq, norm_freq, hypernyms) in output_noun_freq_hypernyms_list:
            f.write('{} {} {} {}'.format(word, freq, norm_freq, hypernyms) + '\n')
    with open(output_noun_freq_pickle_file_path, 'wb') as f:
        pickle.dump(output_noun_freq_hypernyms_dict, f)
def conver_dataset_with_ner_from_wordnet(self, input_article_pos_pickle_file_path,
                                         input_summary_pos_pickle_file_path,
                                         input_word_freq_hypernyms_pickle_file_path,
                                         output_article_file_path, output_summary_file_path,
                                         norm_freq_thresold=1.1,
                                         print_per_line=100000):
    """Replace rare nouns that have a person/location/organization hypernym.

    For every (token, pos) line in the article and summary pickles, a noun
    whose normalised frequency is below *norm_freq_thresold* and whose
    hypernym path reaches 'person', 'location' or 'organization' is replaced
    by that hypernym suffixed with '_'; all other tokens pass through
    unchanged.  The rebuilt lines are written to the two output files.
    """
    article_word_pos_per_line_list = utils.read_pickle_file(input_article_pos_pickle_file_path)
    summary_word_pos_per_line_list = utils.read_pickle_file(input_summary_pos_pickle_file_path)
    word_freq_hypernyms_dict = utils.read_pickle_file(input_word_freq_hypernyms_pickle_file_path)
    wordnet_ner_tags = ['person', 'location', 'organization']

    def _substitute(pos_line_list):
        """Rebuild one tagged line; returns (original_line, new_line, replacements)."""
        old_line = ''
        new_line = ''
        replacements = 0
        for (token, pos) in pos_line_list:
            old_line += token + ' '
            replaced = False
            if pos == 'n' and word_freq_hypernyms_dict.get(token, None):
                freq, norm_freq, hypernyms_path_list = word_freq_hypernyms_dict[token]
                if norm_freq < norm_freq_thresold:
                    # First matching hypernym on the path wins.
                    for (hyp, depth) in hypernyms_path_list:
                        if hyp in wordnet_ner_tags:
                            new_line += hyp + '_ '
                            replacements += 1
                            replaced = True
                            break
            if not replaced:
                new_line += token + ' '
        return old_line, new_line, replacements

    count_article_wordnet_replacements = 0
    count_summary_wordnet_replacements = 0
    line_count = 0
    with open(output_article_file_path, 'w', encoding='utf8') as output_article_file, \
            open(output_summary_file_path, 'w', encoding='utf8') as output_summary_file:
        for article_pos_line_list, summary_pos_line_list in \
                zip(article_word_pos_per_line_list, summary_word_pos_per_line_list):
            line_count += 1
            article_line, new_article_line, article_repl = _substitute(article_pos_line_list)
            count_article_wordnet_replacements += article_repl
            summary_line, new_summary_line, summary_repl = _substitute(summary_pos_line_list)
            count_summary_wordnet_replacements += summary_repl
            output_article_file.write(new_article_line.strip() + '\n')
            output_summary_file.write(new_summary_line.strip() + '\n')
            if line_count % print_per_line == 0:
                print('{} line:\n\told_art: {}\n\tnew_art: {}\n\told_sum: {}\n\tnew_sum: {}'.format(
                    line_count, article_line, new_article_line, summary_line, new_summary_line))
                print('\nArticle wordnet replacements: {}\nSummary wordnet replacements: {}\n'
                      'Overall wordnet replacements: {}\n'.format(count_article_wordnet_replacements,
                                                                  count_summary_wordnet_replacements,
                                                                  count_article_wordnet_replacements +
                                                                  count_summary_wordnet_replacements))
    print('\nArticle wordnet replacements: {}\nSummary wordnet replacements: {}\n'
          'Overall wordnet replacements: {}\n'.format(count_article_wordnet_replacements,
                                                      count_summary_wordnet_replacements,
                                                      count_article_wordnet_replacements +
                                                      count_summary_wordnet_replacements))
def conver_duc_dataset_with_ner_from_stanford_and_wordnet(self, input_article_ner_pickle_file_path,
                                                          input_summary1_ner_pickle_file_path,
                                                          input_summary2_ner_pickle_file_path,
                                                          input_summary3_ner_pickle_file_path,
                                                          input_summary4_ner_pickle_file_path,
                                                          input_article_pos_pickle_file_path,
                                                          input_summary1_pos_pickle_file_path,
                                                          input_summary2_pos_pickle_file_path,
                                                          input_summary3_pos_pickle_file_path,
                                                          input_summary4_pos_pickle_file_path,
                                                          input_word_freq_hypernyms_pickle_file_path,
                                                          output_article_file_path,
                                                          output_summary1_file_path,
                                                          output_summary2_file_path,
                                                          output_summary3_file_path,
                                                          output_summary4_file_path,
                                                          word_freq_thresold=9999990,
                                                          print_per_line=100000):
    """Anonymise named entities in the DUC dataset (article + 4 model summaries).

    Three passes, with progress markers '1/9'..'9/9' printed along the way:

    1. Stanford-NER pass: an article token tagged PERSON/LOCATION/ORGANIZATION
       with freq <= word_freq_thresold that also appears in at least one
       summary is replaced by its NER tag in the article and all four
       summaries; the mirror (summary-driven) replacement follows.
    2. Article-driven WordNet pass: the pass-1 output is re-read; article
       nouns with freq <= word_freq_thresold whose hypernym path reaches
       person/location/organization are replaced by that hypernym (suffixed
       '_'), with the same substitution applied to the four summaries.
    3. Summary-driven WordNet pass: the mirror substitution driven by each
       summary in turn; the article file is re-read and re-written after
       each summary sub-pass, so substitutions accumulate.

    The five output files are written in place at output_*_file_path.
    """
    word_freq_hypernyms_dict = utils.read_pickle_file(input_word_freq_hypernyms_pickle_file_path)
    article_word_ner_per_line_list = utils.read_pickle_file(input_article_ner_pickle_file_path)
    print('1/9')
    summary1_word_ner_per_line_list = utils.read_pickle_file(input_summary1_ner_pickle_file_path)
    summary2_word_ner_per_line_list = utils.read_pickle_file(input_summary2_ner_pickle_file_path)
    summary3_word_ner_per_line_list = utils.read_pickle_file(input_summary3_ner_pickle_file_path)
    summary4_word_ner_per_line_list = utils.read_pickle_file(input_summary4_ner_pickle_file_path)
    print('2/9')
    stanford_ner_tags = ['PERSON', 'LOCATION', 'ORGANIZATION']
    output_article_file = open(output_article_file_path, 'w', encoding='utf8')
    output_summary1_file = open(output_summary1_file_path, 'w', encoding='utf8')
    output_summary2_file = open(output_summary2_file_path, 'w', encoding='utf8')
    output_summary3_file = open(output_summary3_file_path, 'w', encoding='utf8')
    output_summary4_file = open(output_summary4_file_path, 'w', encoding='utf8')
    count_article_driven_replacements = 0
    count_summary_driven_replacements = 0
    line_count = 0
    # ---- Pass 1: Stanford-NER driven replacement ----
    for article_line_list, summary1_line_list, summary2_line_list, summary3_line_list, summary4_line_list \
            in zip(article_word_ner_per_line_list, summary1_word_ner_per_line_list, summary2_word_ner_per_line_list,
                   summary3_word_ner_per_line_list, summary4_word_ner_per_line_list):
        line_count += 1
        # Rebuild plain-text lines; the leading/trailing spaces make the
        # ' token ' substring matching below safe at line boundaries.
        article_line = ' '
        summary1_line = ' '
        summary2_line = ' '
        summary3_line = ' '
        summary4_line = ' '
        for (article_token, article_ner) in article_line_list:
            article_line += article_token + ' '
        for (summary_token, summary_ner) in summary1_line_list:
            summary1_line += summary_token + ' '
        for (summary_token, summary_ner) in summary2_line_list:
            summary2_line += summary_token + ' '
        for (summary_token, summary_ner) in summary3_line_list:
            summary3_line += summary_token + ' '
        for (summary_token, summary_ner) in summary4_line_list:
            summary4_line += summary_token + ' '
        new_article_line = article_line
        new_summary1_line = summary1_line
        new_summary2_line = summary2_line
        new_summary3_line = summary3_line
        new_summary4_line = summary4_line
        # Article-driven replacement: a tagged article token is replaced
        # everywhere when at least one summary also contains it.
        for (article_token, article_ner) in article_line_list:
            if article_ner in stanford_ner_tags:
                change_flag = True
                if word_freq_hypernyms_dict.get(article_token, None):
                    (freq, norm_freq, hypernyms_path_list) = word_freq_hypernyms_dict[article_token]
                    # Very frequent words are left untouched.
                    if freq > word_freq_thresold:
                        change_flag = False
                if change_flag:
                    token_find = ' {} '.format(article_token)
                    # max() > -1 iff the token occurs in at least one summary.
                    find_index = max(new_summary1_line.find(token_find), new_summary2_line.find(token_find),
                                     new_summary3_line.find(token_find), new_summary4_line.find(token_find))
                    if find_index > -1:
                        token_replace = ' {} '.format(article_ner)
                        new_summary1_line = new_summary1_line.replace(token_find, token_replace)
                        new_summary2_line = new_summary2_line.replace(token_find, token_replace)
                        new_summary3_line = new_summary3_line.replace(token_find, token_replace)
                        new_summary4_line = new_summary4_line.replace(token_find, token_replace)
                        new_article_line = new_article_line.replace(token_find, token_replace)
                        count_article_driven_replacements += 1
        # Summary-driven replacement, mirrored for each of the 4 summaries.
        # NOTE(review): the new_summaryX_line values are snapshotted by this
        # zip, so a summary-driven edit to one summary does not propagate to
        # the other three (only new_article_line accumulates) — confirm this
        # is intended.
        for summary_line_list, output_summary_file, new_summary_line, summary_line in zip(
                [summary1_line_list, summary2_line_list, summary3_line_list, summary4_line_list],
                [output_summary1_file, output_summary2_file, output_summary3_file, output_summary4_file],
                [new_summary1_line, new_summary2_line, new_summary3_line, new_summary4_line],
                [summary1_line, summary2_line, summary3_line, summary4_line]):
            for (summary_token, summary_ner) in summary_line_list:
                if summary_ner in stanford_ner_tags:
                    change_flag = True
                    if word_freq_hypernyms_dict.get(summary_token, None):
                        (freq, norm_freq, hypernyms_path_list) = word_freq_hypernyms_dict[summary_token]
                        if freq > word_freq_thresold:
                            change_flag = False
                    if change_flag:
                        token_find = ' {} '.format(summary_token)
                        find_index = new_article_line.find(token_find)
                        if find_index > -1:
                            token_replace = ' {} '.format(summary_ner)
                            new_article_line = new_article_line.replace(token_find, token_replace)
                            new_summary_line = new_summary_line.replace(token_find, token_replace)
                            count_summary_driven_replacements += 1
            output_summary_file.write(new_summary_line.strip() + '\n')
            if line_count % print_per_line == 0:
                print('{} line:\n\told_art: {}\n\tnew_art: {}\n\told_sum: {}\n\tnew_sum: {}'.format(
                    line_count, article_line, new_article_line, summary_line, new_summary_line))
                print(
                    '\tarticle driven replacements: {}\n\tSummary driven replacements: {}\n'
                    '\tOverall replacements: {}'.format(
                        count_article_driven_replacements, count_summary_driven_replacements,
                        count_article_driven_replacements + count_summary_driven_replacements))
        output_article_file.write(new_article_line.strip() + '\n')
    # Free the NER lists before the POS pickles are loaded.
    del article_word_ner_per_line_list
    del summary1_word_ner_per_line_list
    del summary2_word_ner_per_line_list
    del summary3_word_ner_per_line_list
    del summary4_word_ner_per_line_list
    output_article_file.close()
    output_summary1_file.close()
    output_summary2_file.close()
    output_summary3_file.close()
    output_summary4_file.close()
    print('3/9')
    print('\narticle driven replacements: {}\nSummary driven replacements: {}\nOverall replacements: {}'.format(
        count_article_driven_replacements, count_summary_driven_replacements,
        count_article_driven_replacements + count_summary_driven_replacements))
    # ---- Pass 2: article-driven WordNet hypernym replacement ----
    wordnet_ner_tags = ['person', 'location', 'organization']
    # Re-read the pass-1 output files as per-line token lists.
    article_per_line_list = []
    output_article_file = open(output_article_file_path, 'r', encoding='utf8')
    for line in output_article_file:
        article_per_line_list.append(line.split())
    output_article_file.close()
    summary1_per_line_list = []
    summary2_per_line_list = []
    summary3_per_line_list = []
    summary4_per_line_list = []
    output_summary1_file = open(output_summary1_file_path, 'r', encoding='utf8')
    output_summary2_file = open(output_summary2_file_path, 'r', encoding='utf8')
    output_summary3_file = open(output_summary3_file_path, 'r', encoding='utf8')
    output_summary4_file = open(output_summary4_file_path, 'r', encoding='utf8')
    for line in output_summary1_file:
        summary1_per_line_list.append(line.split())
    output_summary1_file.close()
    for line in output_summary2_file:
        summary2_per_line_list.append(line.split())
    output_summary2_file.close()
    for line in output_summary3_file:
        summary3_per_line_list.append(line.split())
    output_summary3_file.close()
    for line in output_summary4_file:
        summary4_per_line_list.append(line.split())
    output_summary4_file.close()
    print('4/9')
    article_word_pos_per_line_list = utils.read_pickle_file(input_article_pos_pickle_file_path)
    print('5/9')
    # Rewrite all five files in place.
    output_article_file = open(output_article_file_path, 'w', encoding='utf8')
    output_summary1_file = open(output_summary1_file_path, 'w', encoding='utf8')
    output_summary2_file = open(output_summary2_file_path, 'w', encoding='utf8')
    output_summary3_file = open(output_summary3_file_path, 'w', encoding='utf8')
    output_summary4_file = open(output_summary4_file_path, 'w', encoding='utf8')
    stan_repl_art = 0  # article tokens that already carry a Stanford tag ('_' suffixed)
    wordnet_repl_art = 0  # article nouns replaced via a WordNet hypernym
    line_count = 0
    for article_line_list, summary1_line_list, summary2_line_list, summary3_line_list, summary4_line_list, \
            word_pos_line_list in zip(article_per_line_list, summary1_per_line_list, summary2_per_line_list,
                                      summary3_per_line_list, summary4_per_line_list, article_word_pos_per_line_list):
        # for line_list in article_per_line_list:
        line_count += 1
        new_line_str = ''
        old_line_str = ''
        old_summary1_line_str = ' '
        for word in summary1_line_list:
            old_summary1_line_str += word + ' '
        old_summary2_line_str = ' '
        for word in summary2_line_list:
            old_summary2_line_str += word + ' '
        old_summary3_line_str = ' '
        for word in summary3_line_list:
            old_summary3_line_str += word + ' '
        old_summary4_line_str = ' '
        for word in summary4_line_list:
            old_summary4_line_str += word + ' '
        new_summary1_line_str = old_summary1_line_str
        new_summary2_line_str = old_summary2_line_str
        new_summary3_line_str = old_summary3_line_str
        new_summary4_line_str = old_summary4_line_str
        # `word` comes from the pass-1 file, (word_, pos) from the POS pickle.
        # NOTE(review): assumes the two token streams remain aligned after
        # the pass-1 substitutions — confirm.
        for word, (word_, pos) in zip(article_line_list, word_pos_line_list):
            old_line_str += word + ' '
            if word in stanford_ner_tags:
                # Token was replaced by a Stanford tag in pass 1; mark it
                # with a trailing '_'.
                new_line_str += word + '_ '
                stan_repl_art += 1
            elif pos == 'n':
                flag = True
                if word_freq_hypernyms_dict.get(word, None):
                    (freq, norm_freq, hypernyms_depth_list) = word_freq_hypernyms_dict[word]
                    if freq < word_freq_thresold + 1:
                        # First matching hypernym on the path wins.
                        for (hyp, depth) in hypernyms_depth_list:
                            if hyp in wordnet_ner_tags:
                                new_line_str += hyp + '_ '
                                wordnet_repl_art += 1
                                token_for_replacemet = ' {} '.format(word)
                                token_replace = ' {}_ '.format(hyp)
                                new_summary1_line_str = new_summary1_line_str.replace(token_for_replacemet,
                                                                                      token_replace)
                                new_summary2_line_str = new_summary2_line_str.replace(token_for_replacemet,
                                                                                      token_replace)
                                new_summary3_line_str = new_summary3_line_str.replace(token_for_replacemet,
                                                                                      token_replace)
                                new_summary4_line_str = new_summary4_line_str.replace(token_for_replacemet,
                                                                                      token_replace)
                                flag = False
                                break
                if flag:
                    new_line_str += word + ' '
            else:
                new_line_str += word + ' '
        output_article_file.write(new_line_str.strip() + '\n')
        output_summary1_file.write(new_summary1_line_str.strip() + '\n')
        output_summary2_file.write(new_summary2_line_str.strip() + '\n')
        output_summary3_file.write(new_summary3_line_str.strip() + '\n')
        output_summary4_file.write(new_summary4_line_str.strip() + '\n')
        if line_count % print_per_line == 0:
            print('{} line:\n\told_art: {}\n\tnew_art: {}\n\told_sum: {}\n\tnew_sum: {}\n\t'
                  'old_sum: {}\n\tnew_sum: {}\n\told_sum: {}\n\tnew_sum: {}\n\told_sum: {}\n\tnew_sum: {}'.format(
                      line_count, old_line_str, new_line_str, old_summary1_line_str, new_summary1_line_str,
                      old_summary2_line_str, new_summary2_line_str, old_summary3_line_str, new_summary3_line_str,
                      old_summary4_line_str, new_summary4_line_str))
            print('\tArticle -> stanford, wordnet and Overall replacements: {}, {} & {}'.format(
                stan_repl_art, wordnet_repl_art, stan_repl_art + wordnet_repl_art))
    output_article_file.close()
    output_summary1_file.close()
    output_summary2_file.close()
    output_summary3_file.close()
    output_summary4_file.close()
    del article_word_pos_per_line_list
    del article_per_line_list
    print('6/9')
    # ---- Pass 3: summary-driven WordNet hypernym replacement ----
    stan_repl_sum = 0
    wordnet_repl_sum = 0
    line_count = 0
    # One sub-pass per summary; the article file is re-read and re-written
    # inside the loop, so sub-pass substitutions accumulate in it.
    for output_summary_file_path, input_summary_pos_pickle_file_path \
            in zip([output_summary1_file_path, output_summary2_file_path, output_summary3_file_path,
                    output_summary4_file_path], [input_summary1_pos_pickle_file_path,
                                                 input_summary2_pos_pickle_file_path,
                                                 input_summary3_pos_pickle_file_path,
                                                 input_summary4_pos_pickle_file_path]):
        article_per_line_list = []
        output_article_file = open(output_article_file_path, 'r', encoding='utf8')
        for line in output_article_file:
            article_per_line_list.append(line.split())
        output_article_file.close()
        summary_per_line_list = []
        output_summary_file = open(output_summary_file_path, 'r', encoding='utf8')
        for line in output_summary_file:
            summary_per_line_list.append(line.split())
        output_summary_file.close()
        summary_word_pos_per_line_list = utils.read_pickle_file(input_summary_pos_pickle_file_path)
        print('7/9')
        output_summary_file = open(output_summary_file_path, 'w', encoding='utf8')
        output_article_file = open(output_article_file_path, 'w', encoding='utf8')
        for summary_line_list, article_line_list, word_pos_line_list in \
                zip(summary_per_line_list, article_per_line_list, summary_word_pos_per_line_list):
            line_count += 1
            new_line_str = ''
            old_line_str = ''
            old_article_line_str = ' '
            for word in article_line_list:
                old_article_line_str += word + ' '
            new_article_line_str = old_article_line_str
            # Mirror of the pass-2 loop, driven by the summary tokens.
            for word, (word_, pos) in zip(summary_line_list, word_pos_line_list):
                old_line_str += word + ' '
                if word in stanford_ner_tags:
                    new_line_str += word + '_ '
                    stan_repl_sum += 1
                elif pos == 'n':
                    flag = True
                    if word_freq_hypernyms_dict.get(word, None):
                        (freq, norm_freq, hypernyms_depth_list) = word_freq_hypernyms_dict[word]
                        if freq < word_freq_thresold + 1:
                            for (hyp, depth) in hypernyms_depth_list:
                                if hyp in wordnet_ner_tags:
                                    new_line_str += hyp + '_ '
                                    token_for_replacement = ' {} '.format(word)
                                    token_replace = ' {}_ '.format(hyp)
                                    new_article_line_str = new_article_line_str.replace(token_for_replacement,
                                                                                        token_replace)
                                    wordnet_repl_sum += 1
                                    flag = False
                                    break
                    if flag:
                        new_line_str += word + ' '
                else:
                    new_line_str += word + ' '
            output_summary_file.write(new_line_str.strip() + '\n')
            output_article_file.write(new_article_line_str.strip() + '\n')
            if line_count % print_per_line == 0:
                print('{} line:\n\told_art: {}\n\tnew_art: {}\n\told_sum: {}\n\tnew_sum: {}'.format(
                    line_count, old_article_line_str, new_article_line_str, old_line_str, new_line_str))
                print('\tSummary -> stanford, wordnet and Overall replacements: {}, {} & {}'.format(
                    stan_repl_sum, wordnet_repl_sum, stan_repl_sum + wordnet_repl_sum))
        output_summary_file.close()
        output_article_file.close()
        del summary_word_pos_per_line_list
        del summary_per_line_list
        print('8/9')
    print(
        '\nOverall replacements:\n\tArticle -> stanford, wordnet and Overall replacements: {}, {} & {}'.format(
            stan_repl_art, wordnet_repl_art, stan_repl_art + wordnet_repl_art))
    print(
        '\tSummary -> stanford, wordnet and overall replacements: {}, {} & {}'.format(
            stan_repl_sum, wordnet_repl_sum, stan_repl_sum + wordnet_repl_sum))
    print('\tArticle and Summary -> stanford, wordnet and overall replacements {}, {}, {}'.format(
        stan_repl_art + stan_repl_sum,
        wordnet_repl_art + wordnet_repl_sum,
        stan_repl_art + stan_repl_sum +
        wordnet_repl_art + wordnet_repl_sum
    ))
    print('Output files of threshold {}:\n\t{}\n\t{}\n\t{}\n\t{}\n\t{}'.format(
        word_freq_thresold, output_article_file_path, output_summary1_file_path, output_summary2_file_path,
        output_summary3_file_path, output_summary4_file_path))
    print('9/9')
def conver_dataset_with_ner_from_stanford_and_wordnet(self, input_article_ner_pickle_file_path,
                                                      input_summary_ner_pickle_file_path,
                                                      input_article_pos_pickle_file_path,
                                                      input_summary_pos_pickle_file_path,
                                                      input_word_freq_hypernyms_pickle_file_path,
                                                      output_article_file_path, output_summary_file_path,
                                                      word_freq_thresold=9999990,
                                                      print_per_line=100000):
    """Anonymize/generalize named entities in a parallel article/summary dataset.

    Runs three passes, each rewriting both output files in place:
      1. Stanford-NER driven: tokens tagged PERSON/LOCATION/ORGANIZATION whose
         frequency does not exceed ``word_freq_thresold`` and that occur on
         BOTH sides are replaced by the bare NER tag.
      2. WordNet driven on the article side: rare nouns whose hypernym chain
         contains person/location/organization are replaced by that hypernym
         (suffixed ``_``); the change is mirrored into the summary.
      3. Same as pass 2, but driven from the summary side.

    All inputs are paths to pickled NER/POS annotations and to the
    word -> (freq, norm_freq, hypernym_depth_list) dictionary; results are
    written to ``output_article_file_path`` / ``output_summary_file_path``.
    Progress is printed every ``print_per_line`` lines.
    """
    word_freq_hypernyms_dict = utils.read_pickle_file(input_word_freq_hypernyms_pickle_file_path)
    article_word_ner_per_line_list = utils.read_pickle_file(input_article_ner_pickle_file_path)
    print('1/9')
    summary_word_ner_per_line_list = utils.read_pickle_file(input_summary_ner_pickle_file_path)
    print('2/9')
    stanford_ner_tags = ['PERSON', 'LOCATION', 'ORGANIZATION']
    output_article_file = open(output_article_file_path, 'w', encoding='utf8')
    output_summary_file = open(output_summary_file_path, 'w', encoding='utf8')
    count_article_driven_replacements = 0
    count_summary_driven_replacements = 0
    line_count = 0
    # Pass 1/3: Stanford-NER driven replacement, applied only when the token
    # also appears on the opposite side (article <-> summary).
    for article_line_list, summary_line_list in zip(article_word_ner_per_line_list, summary_word_ner_per_line_list):
        line_count += 1
        article_line = ' '
        summary_line = ' '
        for (article_token, article_ner) in article_line_list:
            article_line += article_token + ' '
        for (summary_token, summary_ner) in summary_line_list:
            summary_line += summary_token + ' '
        new_article_line = article_line
        new_summary_line = summary_line
        for (article_token, article_ner) in article_line_list:
            if article_ner in stanford_ner_tags:
                change_flag = True
                if word_freq_hypernyms_dict.get(article_token, None):
                    (freq, norm_freq, hypernyms_path_list) = word_freq_hypernyms_dict[article_token]
                    if freq > word_freq_thresold:
                        # Frequent entities are left untouched.
                        change_flag = False
                if change_flag:
                    token_find = ' {} '.format(article_token)
                    find_index = new_summary_line.find(token_find)
                    if find_index > -1:
                        token_replace = ' {} '.format(article_ner)
                        new_summary_line = new_summary_line.replace(token_find, token_replace)
                        new_article_line = new_article_line.replace(token_find, token_replace)
                        count_article_driven_replacements += 1
        for (summary_token, summary_ner) in summary_line_list:
            if summary_ner in stanford_ner_tags:
                change_flag = True
                if word_freq_hypernyms_dict.get(summary_token, None):
                    (freq, norm_freq, hypernyms_path_list) = word_freq_hypernyms_dict[summary_token]
                    if freq > word_freq_thresold:
                        change_flag = False
                if change_flag:
                    token_find = ' {} '.format(summary_token)
                    find_index = new_article_line.find(token_find)
                    if find_index > -1:
                        token_replace = ' {} '.format(summary_ner)
                        new_article_line = new_article_line.replace(token_find, token_replace)
                        new_summary_line = new_summary_line.replace(token_find, token_replace)
                        count_summary_driven_replacements += 1
        output_article_file.write(new_article_line.strip() + '\n')
        output_summary_file.write(new_summary_line.strip() + '\n')
        if line_count % print_per_line == 0:
            print('{} line:\n\told_art: {}\n\tnew_art: {}\n\told_sum: {}\n\tnew_sum: {}'.format(
                line_count, article_line, new_article_line, summary_line, new_summary_line))
            print(
                '\tarticle driven replacements: {}\n\tSummary driven replacements: {}\n'
                '\tOverall replacements: {}'.format(
                    count_article_driven_replacements, count_summary_driven_replacements,
                    count_article_driven_replacements + count_summary_driven_replacements))
    del article_word_ner_per_line_list
    del summary_word_ner_per_line_list
    output_article_file.close()
    output_summary_file.close()
    print('3/9')
    print('\narticle driven replacements: {}\nSummary driven replacements: {}\nOverall replacements: {}'.format(
        count_article_driven_replacements, count_summary_driven_replacements,
        count_article_driven_replacements + count_summary_driven_replacements))
    # Pass 2/3: WordNet-driven generalization on the article side.  The files
    # written by pass 1 are read back, rewritten, and the summary kept in sync.
    wordnet_ner_tags = ['person', 'location', 'organization']
    article_per_line_list = []
    output_article_file = open(output_article_file_path, 'r', encoding='utf8')
    for line in output_article_file:
        article_per_line_list.append(line.split())
    output_article_file.close()
    summary_per_line_list = []
    output_summary_file = open(output_summary_file_path, 'r', encoding='utf8')
    for line in output_summary_file:
        summary_per_line_list.append(line.split())
    output_summary_file.close()
    print('4/9')
    article_word_pos_per_line_list = utils.read_pickle_file(input_article_pos_pickle_file_path)
    print('5/9')
    output_article_file = open(output_article_file_path, 'w', encoding='utf8')
    output_summary_file = open(output_summary_file_path, 'w', encoding='utf8')
    stan_repl_art = 0
    wordnet_repl_art = 0
    line_count = 0
    for article_line_list, summary_line_list, word_pos_line_list in \
            zip(article_per_line_list, summary_per_line_list, article_word_pos_per_line_list):
        line_count += 1
        new_line_str = ''
        old_line_str = ''
        old_summary_line_str = ' '
        for word in summary_line_list:
            old_summary_line_str += word + ' '
        new_summary_line_str = old_summary_line_str
        for word, (word_, pos) in zip(article_line_list, word_pos_line_list):
            old_line_str += word + ' '
            if word in stanford_ner_tags:
                # Bare NER tag produced by pass 1: mark it with the '_' suffix.
                new_line_str += word + '_ '
                stan_repl_art += 1
            elif pos == 'n':
                flag = True
                if word_freq_hypernyms_dict.get(word, None):
                    (freq, norm_freq, hypernyms_depth_list) = word_freq_hypernyms_dict[word]
                    if freq < word_freq_thresold + 1:
                        # Replace with the first hypernym on the path that is a
                        # person/location/organization concept.
                        for (hyp, depth) in hypernyms_depth_list:
                            if hyp in wordnet_ner_tags:
                                new_line_str += hyp + '_ '
                                wordnet_repl_art += 1
                                token_for_replacement = ' {} '.format(word)
                                token_replace = ' {}_ '.format(hyp)
                                new_summary_line_str = new_summary_line_str.replace(token_for_replacement,
                                                                                    token_replace)
                                flag = False
                                break
                if flag:
                    new_line_str += word + ' '
            else:
                new_line_str += word + ' '
        output_article_file.write(new_line_str.strip() + '\n')
        output_summary_file.write(new_summary_line_str.strip() + '\n')
        if line_count % print_per_line == 0:
            print('{} line:\n\told_art: {}\n\tnew_art: {}\n\told_sum: {}\n\tnew_sum: {}'.format(
                line_count, old_line_str, new_line_str, old_summary_line_str, new_summary_line_str))
            print('\tArticle -> stanford, wordnet and Overall replacements: {}, {} & {}'.format(
                stan_repl_art, wordnet_repl_art, stan_repl_art + wordnet_repl_art))
    output_article_file.close()
    output_summary_file.close()
    del article_word_pos_per_line_list
    del article_per_line_list
    print('6/9')
    # Pass 3/3: WordNet-driven generalization on the summary side.
    article_per_line_list = []
    output_article_file = open(output_article_file_path, 'r', encoding='utf8')
    for line in output_article_file:
        article_per_line_list.append(line.split())
    output_article_file.close()
    summary_per_line_list = []
    output_summary_file = open(output_summary_file_path, 'r', encoding='utf8')
    for line in output_summary_file:
        summary_per_line_list.append(line.split())
    output_summary_file.close()
    summary_word_pos_per_line_list = utils.read_pickle_file(input_summary_pos_pickle_file_path)
    print('7/9')
    output_summary_file = open(output_summary_file_path, 'w', encoding='utf8')
    output_article_file = open(output_article_file_path, 'w', encoding='utf8')
    stan_repl_sum = 0
    wordnet_repl_sum = 0
    line_count = 0
    for summary_line_list, article_line_list, word_pos_line_list in \
            zip(summary_per_line_list, article_per_line_list, summary_word_pos_per_line_list):
        line_count += 1
        new_line_str = ''
        old_line_str = ''
        old_article_line_str = ' '
        for word in article_line_list:
            old_article_line_str += word + ' '
        new_article_line_str = old_article_line_str
        for word, (word_, pos) in zip(summary_line_list, word_pos_line_list):
            old_line_str += word + ' '
            if word in stanford_ner_tags:
                new_line_str += word + '_ '
                stan_repl_sum += 1
            elif pos == 'n':
                flag = True
                if word_freq_hypernyms_dict.get(word, None):
                    (freq, norm_freq, hypernyms_depth_list) = word_freq_hypernyms_dict[word]
                    if freq < word_freq_thresold + 1:
                        for (hyp, depth) in hypernyms_depth_list:
                            if hyp in wordnet_ner_tags:
                                new_line_str += hyp + '_ '
                                token_for_replacement = ' {} '.format(word)
                                token_replace = ' {}_ '.format(hyp)
                                new_article_line_str = new_article_line_str.replace(token_for_replacement,
                                                                                    token_replace)
                                wordnet_repl_sum += 1
                                flag = False
                                break
                if flag:
                    new_line_str += word + ' '
            else:
                new_line_str += word + ' '
        output_summary_file.write(new_line_str.strip() + '\n')
        output_article_file.write(new_article_line_str.strip() + '\n')
        if line_count % print_per_line == 0:
            print('{} line:\n\told_art: {}\n\tnew_art: {}\n\told_sum: {}\n\tnew_sum: {}'.format(
                line_count, old_article_line_str, new_article_line_str, old_line_str, new_line_str))
            print('\tSummary -> stanford, wordnet and Overall replacements: {}, {} & {}'.format(
                stan_repl_sum, wordnet_repl_sum, stan_repl_sum + wordnet_repl_sum))
    output_summary_file.close()
    # Fixed: the article file was never closed after pass 3, leaking the
    # handle and risking unflushed buffered writes.
    output_article_file.close()
    del summary_word_pos_per_line_list
    del summary_per_line_list
    print('8/9')
    print(
        '\nOverall replacements:\n\tArticle -> stanford, wordnet and Overall replacements: {}, {} & {}'.format(
            stan_repl_art, wordnet_repl_art, stan_repl_art + wordnet_repl_art))
    print(
        '\tSummary -> stanford, wordnet and overall replacements: {}, {} & {}'.format(
            stan_repl_sum, wordnet_repl_sum, stan_repl_sum + wordnet_repl_sum))
    print('\tArticle and Summary -> stanford, wordnet and overall replacements {}, {}, {}'.format(
        stan_repl_art + stan_repl_sum,
        wordnet_repl_art + wordnet_repl_sum,
        stan_repl_art + stan_repl_sum +
        wordnet_repl_art + wordnet_repl_sum
    ))
    print('Output files of threshold {}:\n\t{}\n\t{}'.format(
        word_freq_thresold, output_article_file_path, output_summary_file_path))
    print('9/9')
def convert_duc_dataset_based_on_level_of_generalizetion(self,
                                                         article_word_pos_line_list_pickle_file_path,
                                                         ref1_word_pos_line_list_pickle_file_path,
                                                         ref2_word_pos_line_list_pickle_file_path,
                                                         ref3_word_pos_line_list_pickle_file_path,
                                                         ref4_word_pos_line_list_pickle_file_path,
                                                         word_hypernym_dict_pickle_file_path,
                                                         output_articles_file_path,
                                                         output_ref1_file_path,
                                                         output_ref2_file_path,
                                                         output_ref3_file_path,
                                                         output_ref4_file_path):
    """Generalize nouns of the DUC dataset (articles + 4 reference summaries).

    Every noun that has a different hypernym in the pickled
    word -> hypernym dictionary is replaced by that hypernym suffixed with
    ``_``; all other tokens pass through unchanged.  One output text file is
    written per input pickle.  Prints replacement statistics at the end.
    """
    word_hypernym_dict = utils.read_pickle_file(word_hypernym_dict_pickle_file_path)
    # Shared across all five files so the printed statistics are cumulative.
    words_set = set()
    hypernyms_set = set()

    def _generalize_file(input_pickle_path, output_path):
        # Rewrite one pickled word/POS line list to `output_path`, replacing
        # generalizable nouns; returns the number of replacements made.
        changes = 0
        word_pos_list_per_line = utils.read_pickle_file(input_pickle_path)
        output_file = open(output_path, 'w', encoding='utf8')
        for line_list in word_pos_list_per_line:
            line_text = ''
            for (word, pos) in line_list:
                # Fixed: was `pos is 'n'` — identity comparison with a string
                # literal; works only via CPython interning and warns on
                # modern interpreters.
                if pos == 'n':
                    words_set.add(word)
                    try:
                        hypernym = word_hypernym_dict[word]
                        hypernyms_set.add(hypernym)
                        if word != hypernym:
                            line_text += hypernym + '_ '
                            changes += 1
                        else:
                            line_text += word + ' '
                    except KeyError:
                        # Noun without a hypernym entry: keep it unchanged.
                        line_text += word + ' '
                else:
                    line_text += word + ' '
            output_file.write(line_text.strip() + '\n')
        output_file.close()
        return changes

    article_changes = _generalize_file(article_word_pos_line_list_pickle_file_path, output_articles_file_path)
    print('(Articles) Distinct nouns: {}, Changes with distinct hypernyms: {}'.format(len(words_set),
                                                                                      len(hypernyms_set)))
    summary1_changes = _generalize_file(ref1_word_pos_line_list_pickle_file_path, output_ref1_file_path)
    print('(Articles & summaries) Distinct nouns: {}, Changes with distinct hypernyms: {}'.format(len(words_set),
                                                                                                  len(hypernyms_set)))
    summary2_changes = _generalize_file(ref2_word_pos_line_list_pickle_file_path, output_ref2_file_path)
    summary3_changes = _generalize_file(ref3_word_pos_line_list_pickle_file_path, output_ref3_file_path)
    summary4_changes = _generalize_file(ref4_word_pos_line_list_pickle_file_path, output_ref4_file_path)
    print('article changes: {}\n'
          'Summary1 changes {}\nSummary2 changes {}\nSummary3 changes {}\nSummary4 changes {}\n'
          'Overall changes: {}'.format(article_changes,
                                       summary1_changes, summary2_changes, summary3_changes, summary4_changes,
                                       article_changes + summary1_changes + summary2_changes +
                                       summary3_changes + summary4_changes))
def convert_dataset_to_general(self, article_word_pos_line_list_pickle_file_path,
                               summary_word_pos_line_list_pickle_file_path,
                               word_hypernym_dict_pickle_file_path,
                               output_articles_file_path, output_summaries_file_path):
    """Generalize nouns of an article/summary dataset via a hypernym dict.

    Every noun that maps to a different hypernym in the pickled
    word -> hypernym dictionary is replaced by that hypernym suffixed with
    ``_``; all other tokens pass through unchanged.  Writes one output text
    file per input pickle and prints replacement statistics.
    """
    word_hypernym_dict = utils.read_pickle_file(word_hypernym_dict_pickle_file_path)
    # Shared across both files so the printed statistics are cumulative.
    words_set = set()
    hypernyms_set = set()

    def _generalize_file(input_pickle_path, output_path):
        # Rewrite one pickled word/POS line list to `output_path`, replacing
        # generalizable nouns; returns the number of replacements made.
        changes = 0
        word_pos_list_per_line = utils.read_pickle_file(input_pickle_path)
        output_file = open(output_path, 'w', encoding='utf8')
        for line_list in word_pos_list_per_line:
            line_text = ''
            for (word, pos) in line_list:
                # Fixed: was `pos is 'n'` — identity comparison with a string
                # literal (SyntaxWarning on modern Pythons); use equality.
                if pos == 'n':
                    words_set.add(word)
                    try:
                        hypernym = word_hypernym_dict[word]
                        hypernyms_set.add(hypernym)
                        if word != hypernym:
                            line_text += hypernym + '_ '
                            changes += 1
                        else:
                            line_text += word + ' '
                    except KeyError:
                        # Noun without a hypernym entry: keep it unchanged.
                        line_text += word + ' '
                else:
                    line_text += word + ' '
            output_file.write(line_text.strip() + '\n')
        output_file.close()
        return changes

    article_changes = _generalize_file(article_word_pos_line_list_pickle_file_path, output_articles_file_path)
    print('(Articles) Distinct nouns: {}, Changes with distinct hypernyms: {}'.format(len(words_set),
                                                                                      len(hypernyms_set)))
    summary_changes = _generalize_file(summary_word_pos_line_list_pickle_file_path, output_summaries_file_path)
    print('(Articles & summaries) Distinct nouns: {}, Changes with distinct hypernyms: {}'.format(len(words_set),
                                                                                                  len(hypernyms_set)))
    print('article changes: {}\nSummary changes {}\n'
          'Overall changes: {}'.format(article_changes, summary_changes, article_changes + summary_changes))
def vocab_based_on_hypernyms(self, input_word_freq_hypernyms_pickle_file_path,
                             output_hypernym_freq_wordlist_remaininghypernyms_txt_file_path,
                             output_hypernym_freq_wordlist_remaininghypernyms_pickle_file_path,
                             output_word_hypernym_dict_pickle_file,
                             output_word_hypernym_dict_txt_file,
                             upper_word_freq_thres=9999000, min_depth=5):
    """Collapse a word-frequency/hypernym vocabulary into generalized entries.

    Iteratively merges every sufficiently rare word (freq <= upper_word_freq_thres)
    into its first hypernym whose depth is >= ``min_depth``, accumulating
    frequencies and covered-word lists, until no entry can be merged further.
    Writes:
      * txt + pickle dumps of hypernym -> (freq, norm_freq, word_list,
        remaining_hypernyms_list)
      * txt + pickle word -> chosen-hypernym mapping; when a word is covered by
        several hypernyms the most frequent hypernym wins.
    """
    word_freq_hypernyms_dict = utils.read_pickle_file(input_word_freq_hypernyms_pickle_file_path)
    hypernym_freq_wordlist_remaininghypernyms_dict = dict()
    for word, (freq, norm_freq, hypernyms_list) in word_freq_hypernyms_dict.items():
        hypernym_freq_wordlist_remaininghypernyms_dict[word] = (freq, norm_freq, [word], hypernyms_list)
    flag = True
    while flag:
        flag = False
        # Snapshot the dict: it is mutated while we merge entries.
        temp_dict = dict()
        for k, v in hypernym_freq_wordlist_remaininghypernyms_dict.items():
            temp_dict[k] = v
        for word, (freq, norm_freq, wordlist, hypernyms_list) in temp_dict.items():
            if freq < upper_word_freq_thres + 1:
                (hypernym, depth) = hypernyms_list[0]
                if depth >= min_depth:
                    flag = True
                    if hypernym_freq_wordlist_remaininghypernyms_dict.get(hypernym, None):
                        # The hypernym already has an entry: merge this word's
                        # entry into it.
                        (new_freq, new_norm_freq, new_word_list, new_hypernyms_list) = \
                            hypernym_freq_wordlist_remaininghypernyms_dict[hypernym]
                        (freq, norm_freq, wordlist, hypernyms_list) = \
                            hypernym_freq_wordlist_remaininghypernyms_dict.pop(word)
                        # NOTE(review): recovers log10(max_freq + 1) from the
                        # hypernym's pre-merge freq/norm_freq pair — verify the
                        # renormalization is intended when norm_freq can be 0.
                        log_max_freq = np.log10(new_freq + 1) / new_norm_freq
                        new_word_list = list(set([word] + wordlist + new_word_list))
                        new_freq += freq
                        new_norm_freq = np.log10(new_freq + 1) / log_max_freq
                        merged_hypernyms_list = list(set(hypernyms_list + new_hypernyms_list))
                        merged_hypernyms_list = utils.sort_by_second(merged_hypernyms_list, descending=True)
                        try:
                            merged_hypernyms_list.remove((hypernym, depth))
                        except ValueError:
                            # The merged list did not contain the consumed hypernym.
                            pass
                        hypernym_freq_wordlist_remaininghypernyms_dict[hypernym] = (
                            new_freq, new_norm_freq, new_word_list, merged_hypernyms_list)
                    else:
                        # First time this hypernym appears: move the word's
                        # entry under it, dropping the consumed hypernym.
                        (freq, norm_freq, wordlist, hypernyms_list) = \
                            hypernym_freq_wordlist_remaininghypernyms_dict.pop(word)
                        hypernyms_list.remove((hypernym, depth))
                        hypernym_freq_wordlist_remaininghypernyms_dict[hypernym] = (
                            freq, norm_freq, list(set([word] + wordlist)), hypernyms_list)
    hypernym_set = set()
    word_list_set = set()
    for word, (new_freq, new_norm_freq, new_word_list,
               new_hypernyms_list) in hypernym_freq_wordlist_remaininghypernyms_dict.items():
        hypernym_set.add(word)
        for el in new_word_list:
            word_list_set.add(el)
    print('Generalized & overall words: {} {}'.format(len(hypernym_set), len(word_list_set)))
    hypernym_freq_wordlist_remaininghypernyms_list = []
    for word, (new_freq, new_norm_freq, new_word_list,
               new_hypernyms_list) in hypernym_freq_wordlist_remaininghypernyms_dict.items():
        hypernym_freq_wordlist_remaininghypernyms_list.append(
            (word, new_freq, new_norm_freq, new_word_list, new_hypernyms_list))
    hypernym_freq_wordlist_remaininghypernyms_list = utils.sort_by_second(
        hypernym_freq_wordlist_remaininghypernyms_list, descending=True)
    with open(output_hypernym_freq_wordlist_remaininghypernyms_txt_file_path, 'w', encoding='utf8') as f:
        for (word, new_freq, new_norm_freq, new_word_list,
             new_hypernyms_list) in hypernym_freq_wordlist_remaininghypernyms_list:
            f.write('{} {} {} {} {}\n'.format(word, new_freq, new_norm_freq, new_word_list, new_hypernyms_list))
    with open(output_hypernym_freq_wordlist_remaininghypernyms_pickle_file_path, 'wb') as f:
        pickle.dump(hypernym_freq_wordlist_remaininghypernyms_dict, f)
    word_hypernym_dict = dict()
    count_words = 0
    for hypernym, (freq, norm_freq, word_list,
                   hypernyms_list) in hypernym_freq_wordlist_remaininghypernyms_dict.items():
        for w in word_list:
            if word_hypernym_dict.get(w, None):
                # Word is covered by more than one hypernym: keep whichever
                # hypernym has the higher frequency.
                count_words += 1
                hyp1 = word_hypernym_dict[w]
                (freq1, norm_freq, _, _) = hypernym_freq_wordlist_remaininghypernyms_dict[hyp1]
                (freq2, norm_freq, _, _) = hypernym_freq_wordlist_remaininghypernyms_dict[hypernym]
                # Fixed: both branches previously assigned `hypernym`, which
                # made the frequency comparison dead code; now the existing,
                # more frequent mapping is kept when freq2 <= freq1.
                if freq2 > freq1:
                    word_hypernym_dict[w] = hypernym
            else:
                word_hypernym_dict[w] = hypernym
    print('same w: ', count_words)
    with open(output_word_hypernym_dict_pickle_file, 'wb') as f:
        pickle.dump(word_hypernym_dict, f)
    with open(output_word_hypernym_dict_txt_file, 'w', encoding='utf8') as f:
        for w, h in word_hypernym_dict.items():
            f.write('{} {}\n'.format(w, h))
def word_freq_hypernym_paths(self, input_word_pos_freq_dict_file_path,
                             output_word_freq_hypernyms_txt_file_path,
                             output_word_freq_hypernyms_pickle_file_path):
    """Compute WordNet hypernym paths for every noun in a word/POS freq dict.

    Reads a pickled {(token, pos): freq} dictionary and, for each noun, builds
    a depth-sorted (word, depth) hypernym list headed by the token's own
    lemma.  Frequencies are log-normalized against the maximum noun frequency.
    Writes a frequency-sorted txt listing and a pickled
    {word: (freq, norm_freq, hypernym_depth_list)} dictionary.
    """
    word_pos_freq_dict = utils.read_pickle_file(input_word_pos_freq_dict_file_path)
    lemmatizer = nltk.stem.WordNetLemmatizer()
    paths_by_token = dict()
    highest_freq = 0
    for (token, pos), freq in word_pos_freq_dict.items():
        if pos != 'n':
            continue
        lemma = lemmatizer.lemmatize(token, pos='n')
        synset = self.make_synset(lemma, category='n')
        if synset is None:
            continue
        # NOTE(review): return value unused — presumably called for a caching
        # side effect inside nltk; confirm before removing.
        synset.max_depth()
        flattened = self.merge_lists(synset.hypernym_paths())
        by_depth = self.syncet_sort_accornding_max_depth(flattened)
        word_depth_list = self.word_depth_of_synsents(by_depth)
        if word_depth_list[0][0] != lemma:
            # Make sure the path starts with the token's own lemma, one level
            # deeper than the deepest hypernym found.
            word_depth_list = [(lemma, word_depth_list[0][1] + 1)] + word_depth_list
        paths_by_token[token] = (freq, word_depth_list)
        if freq > highest_freq:
            highest_freq = freq
    sorted_entries = []
    word_freq_normfreq_hypernyms_dict = dict()
    for word, (freq, hypernym_depth_list) in paths_by_token.items():
        norm_freq = np.log10(freq + 1) / np.log10(highest_freq + 1)
        sorted_entries.append((word, freq, norm_freq, hypernym_depth_list))
        word_freq_normfreq_hypernyms_dict[word] = (freq, norm_freq, hypernym_depth_list)
    sorted_entries = utils.sort_by_second(sorted_entries, descending=True)
    with open(output_word_freq_hypernyms_txt_file_path, 'w', encoding='utf8') as f:
        for (word, freq, norm_freq, hypernym_depth_list) in sorted_entries:
            f.write('{} {} {} {}\n'.format(word, freq, norm_freq, hypernym_depth_list))
    with open(output_word_freq_hypernyms_pickle_file_path, 'wb') as f:
        pickle.dump(word_freq_normfreq_hypernyms_dict, f)
def conver_dataset_with_ner(self, input_article_ner_pickle_file_path, input_summary_ner_pickle_file_path,
                            output_article_file_path, output_summary_file_path,
                            print_per_line=200):
    """Replace PERSON/LOCATION/ORGANIZATION tokens that appear on both sides.

    For every article/summary line pair, a token tagged with one of the
    tracked NER labels is replaced by '<LABEL>_' in BOTH texts — but only when
    the token also occurs in the opposite text.  Results are written to the
    two output files; progress is printed every ``print_per_line`` lines.
    """
    article_word_ner_per_line_list = utils.read_pickle_file(input_article_ner_pickle_file_path)
    summary_word_ner_per_line_list = utils.read_pickle_file(input_summary_ner_pickle_file_path)
    ner_tags = ['PERSON', 'LOCATION', 'ORGANIZATION']
    article_out = open(output_article_file_path, 'w', encoding='utf8')
    summary_out = open(output_summary_file_path, 'w', encoding='utf8')
    article_driven = 0
    summary_driven = 0
    pairs = zip(article_word_ner_per_line_list, summary_word_ner_per_line_list)
    for line_no, (article_tokens, summary_tokens) in enumerate(pairs, start=1):
        # Leading/trailing single space lets us match whole tokens with ' x '.
        article_text = ' ' + ''.join(tok + ' ' for (tok, _tag) in article_tokens)
        summary_text = ' ' + ''.join(tok + ' ' for (tok, _tag) in summary_tokens)
        new_article = article_text
        new_summary = summary_text
        for (tok, tag) in article_tokens:
            if tag not in ner_tags:
                continue
            needle = ' {} '.format(tok)
            if new_summary.find(needle) > -1:
                replacement = ' {}_ '.format(tag)
                new_summary = new_summary.replace(needle, replacement)
                new_article = new_article.replace(needle, replacement)
                article_driven += 1
        for (tok, tag) in summary_tokens:
            if tag not in ner_tags:
                continue
            needle = ' {} '.format(tok)
            if new_article.find(needle) > -1:
                replacement = ' {}_ '.format(tag)
                new_article = new_article.replace(needle, replacement)
                new_summary = new_summary.replace(needle, replacement)
                summary_driven += 1
        article_out.write(new_article.strip() + '\n')
        summary_out.write(new_summary.strip() + '\n')
        if line_no % print_per_line == 0:
            print('{} line:\n\told_art: {}\n\tnew_art: {}\n\told_sum: {}\n\tnew_sum: {}'.format(
                line_no, article_text, new_article, summary_text, new_summary))
            print(
                '\tarticle driven replacements: {}\n\tSummary driven replacements: {}\n'
                '\tOverall replacements: {}'.format(
                    article_driven, summary_driven,
                    article_driven + summary_driven))
    article_out.close()
    summary_out.close()
    print('\narticle driven replacements: {}\nSummary driven replacements: {}\nOverall replacements: {}'.format(
        article_driven, summary_driven,
        article_driven + summary_driven))
def ner_of_dataset_and_vocabulary_of_ner_words(self, input_article_file_path,
                                               input_summary_file_path,
                                               output_article_word_ner_line_list_sample_txt_file,
                                               output_summary_word_ner_line_list_sample_txt_file,
                                               output_article_word_ner_line_list_pickle_file,
                                               output_summary_word_ner_line_list_pickle_file,
                                               output_word_ner_freq_dict_txt_file,
                                               output_word_ner_freq_dict_pickle_file,
                                               lines_per_ner_application, print_per_line=100,
                                               lines_of_sample_files=1000):
    """Run Stanford NER over an article/summary dataset and build an NE vocab.

    Tokens are batched (``lines_per_ner_application`` lines per NER call, with
    an 'NL_' sentinel marking line boundaries), tagged via
    ``self.stanford_ner``, split back into per-line (word, ner) lists and
    dumped as a txt sample plus a pickle per input file.  Frequencies of
    PERSON/LOCATION/ORGANIZATION (word, tag) pairs over both files are written
    as a sorted txt and a pickle.
    """
    t0 = time.time()
    ner_tags = ['PERSON', 'LOCATION', 'ORGANIZATION']
    word_ner_freq_dict = dict()

    def read_batches(file_path):
        # Group the file's tokens into batches of `lines_per_ner_application`
        # lines; 'NL_' marks original line boundaries for later restoration.
        batches = []
        with open(file_path, 'r', encoding='utf8') as f:
            pending = []
            lines_in_batch = 0
            for line in f:
                lines_in_batch += 1
                pending += line.split() + ['NL_']
                if lines_in_batch == lines_per_ner_application:
                    batches.append(pending)
                    pending = []
                    lines_in_batch = 0
            if lines_in_batch > 0:
                batches.append(pending)
        return batches

    def tag_batches(batches):
        # Tag every batch, split the flat (word, ner) stream back into lines
        # at the 'NL_' sentinels, and count tracked-entity frequencies into
        # the shared word_ner_freq_dict.
        t1 = time.time()
        line_index = 0
        per_line = []
        for batch_text_list in batches:
            line_index += lines_per_ner_application
            word_ner_list = self.stanford_ner(batch_text_list)
            current_line = []
            for (word, ner) in word_ner_list:
                if word != 'NL_':
                    current_line.append((word, ner))
                    if ner in ner_tags:
                        word_ner_freq_dict[(word, ner)] = word_ner_freq_dict.get((word, ner), 0) + 1
                else:
                    per_line.append(current_line)
                    current_line = []
            if line_index % print_per_line == 0:
                t = time.time()
                print('{} line, Time (overall and per {} lines): {} & {:.2f}'.format(
                    line_index, 1000, datetime.timedelta(seconds=t - t0),
                    (t - t1) * 1000 / print_per_line))
                t1 = t
        return per_line

    def write_line_lists(per_line_list, sample_txt_path, pickle_path):
        # Human-readable sample limited to `lines_of_sample_files` lines,
        # plus a full pickle dump.
        with open(sample_txt_path, 'w', encoding='utf8') as f:
            index = 0
            for line_list in per_line_list:
                f.write('{}\n'.format(line_list))
                index += 1
                if index == lines_of_sample_files:
                    break
        with open(pickle_path, 'wb') as f:
            pickle.dump(per_line_list, f)

    print('Reading files:\n\t{}\n\t{}'.format(input_article_file_path, input_summary_file_path))
    article_batches = read_batches(input_article_file_path)
    print('Article data have been loaded on batch list')
    article_word_ner_per_line_list = tag_batches(article_batches)
    del article_batches
    print('Article ner tags have been obtained.')
    write_line_lists(article_word_ner_per_line_list,
                     output_article_word_ner_line_list_sample_txt_file,
                     output_article_word_ner_line_list_pickle_file)
    del article_word_ner_per_line_list
    print('Article files have been created:\n\t{}\n\t{}'.format(
        output_article_word_ner_line_list_sample_txt_file, output_article_word_ner_line_list_pickle_file))
    summary_batches = read_batches(input_summary_file_path)
    print('Summary data have been loaded on batch list')
    summary_word_ner_per_line_list = tag_batches(summary_batches)
    print('Summary ner tags have been obtained.')
    del summary_batches
    write_line_lists(summary_word_ner_per_line_list,
                     output_summary_word_ner_line_list_sample_txt_file,
                     output_summary_word_ner_line_list_pickle_file)
    print('Summary files have been created:\n\t{}\n\t{}'.format(
        output_summary_word_ner_line_list_sample_txt_file, output_summary_word_ner_line_list_pickle_file))
    del summary_word_ner_per_line_list
    # Frequency vocabulary over both files, most frequent first.
    word_pos_freq_list = sorted(word_ner_freq_dict.items(), key=lambda tup: -tup[1])
    with open(output_word_ner_freq_dict_txt_file, 'w', encoding='utf8') as f:
        for ((w, p), freq) in word_pos_freq_list:
            f.write('{} {} {}\n'.format(w, p, freq))
    with open(output_word_ner_freq_dict_pickle_file, 'wb') as f:
        pickle.dump(word_ner_freq_dict, f)
    print('Word-pos-freq files have been created:\n\t{}\n\t{}'.format(
        output_word_ner_freq_dict_txt_file, output_word_ner_freq_dict_pickle_file))
    def ner_of_duc_dataset_and_vocab_of_ne(self, input_article_file_path,
                                           input_summary1_file_path,
                                           input_summary2_file_path,
                                           input_summary3_file_path,
                                           input_summary4_file_path,
                                           output_article_word_ner_line_list_sample_txt_file,
                                           output_summary1_word_ner_line_list_sample_txt_file,
                                           output_summary2_word_ner_line_list_sample_txt_file,
                                           output_summary3_word_ner_line_list_sample_txt_file,
                                           output_summary4_word_ner_line_list_sample_txt_file,
                                           output_article_word_ner_line_list_pickle_file,
                                           output_summary1_word_ner_line_list_pickle_file,
                                           output_summary2_word_ner_line_list_pickle_file,
                                           output_summary3_word_ner_line_list_pickle_file,
                                           output_summary4_word_ner_line_list_pickle_file,
                                           output_word_ner_freq_dict_txt_file,
                                           output_word_ner_freq_dict_pickle_file,
                                           lines_per_ner_application, print_per_line=100,
                                           lines_of_sample_files=1000):
        """NER-tag a DUC article file and its four summary files; build a
        named-entity frequency vocabulary.

        Each input file is read into batches of ``lines_per_ner_application``
        lines.  Tokens of consecutive lines are concatenated, separated by the
        sentinel token 'NL_', so per-line structure can be restored after a
        single ``self.stanford_ner`` call per batch.  For every input file a
        sample txt (first ``lines_of_sample_files`` per-line (word, ner)
        lists) and a full pickle are written.  A combined
        (word, ner) -> frequency dict, counting only PERSON / LOCATION /
        ORGANIZATION tags across all five files, is finally written as txt
        (sorted by decreasing frequency) and pickle.  Progress is printed
        every ``print_per_line`` lines.  Returns None.
        """
        t0 = time.time()
        input_article_batch_text_list = []
        # Only these NER tags contribute to the frequency vocabulary.
        ner_tags = ['PERSON', 'LOCATION', 'ORGANIZATION']
        print('Reading files:\n\t{}\n\t{}\n\t{}\n\t{}\n\t{}'.format(input_article_file_path, input_summary1_file_path,
                                                                    input_summary2_file_path,
                                                                    input_summary3_file_path,
                                                                    input_summary4_file_path))
        # Load the article into batches; 'NL_' marks original line breaks.
        with open(input_article_file_path, 'r', encoding='utf8') as f:
            line_index = 0
            input_temp_list = []
            for line in f:
                line_index += 1
                input_temp_list += line.split() + ['NL_']
                if line_index == lines_per_ner_application:
                    input_article_batch_text_list.append(input_temp_list)
                    input_temp_list = []
                    line_index = 0
            # Keep the trailing partial batch, if any.
            if line_index > 0:
                input_article_batch_text_list.append(input_temp_list)
            f.close()  # NOTE(review): redundant — the with-block already closes f
        print('Article data have been loaded on batch list')
        word_ner_freq_dict = dict()
        article_word_ner_per_line_list = []
        t1 = time.time()
        line_index = 0
        for batch_text_list in input_article_batch_text_list:
            line_index += lines_per_ner_application
            word_ner_list = self.stanford_ner(batch_text_list)
            line_word_ner_list = []
            for (word, ner) in word_ner_list:
                if word != 'NL_':
                    line_word_ner_list.append((word, ner))
                    if ner in ner_tags:
                        # EAFP increment of the (word, ner) frequency.
                        try:
                            new_freq = word_ner_freq_dict[(word, ner)] + 1
                            word_ner_freq_dict[(word, ner)] = new_freq
                        except KeyError:
                            word_ner_freq_dict[(word, ner)] = 1
                else:
                    # Line-break sentinel reached: close off the current line.
                    article_word_ner_per_line_list.append(line_word_ner_list)
                    line_word_ner_list = []
            if line_index % print_per_line == 0:
                t = time.time()
                print('{} line, Time (overall and per {} lines): {} & {:.2f}'.format(
                    line_index, 1000, datetime.timedelta(seconds=t - t0),
                    (t - t1) * 1000 / print_per_line))
                t1 = t
        del input_article_batch_text_list  # free memory before writing outputs
        print('Article ner tags have been obtained.')
        # Sample txt holds at most lines_of_sample_files lines.
        with open(output_article_word_ner_line_list_sample_txt_file, 'w', encoding='utf8') as f:
            index = 0
            for word_ner_line_list in article_word_ner_per_line_list:
                f.write('{}\n'.format(word_ner_line_list))
                index += 1
                if index == lines_of_sample_files:
                    break
        with open(output_article_word_ner_line_list_pickle_file, 'wb') as f:
            pickle.dump(article_word_ner_per_line_list, f)
        del article_word_ner_per_line_list
        print('Article files have been created:\n\t{}\n\t{}'.format(
            output_article_word_ner_line_list_sample_txt_file, output_article_word_ner_line_list_pickle_file))
        # Repeat the same pipeline for each of the four summary files.
        for input_summary_file_path, output_summary_word_ner_line_list_sample_txt_file, \
                output_summary_word_ner_line_list_pickle_file \
                in zip([input_summary1_file_path, input_summary2_file_path,
                        input_summary3_file_path, input_summary4_file_path],
                       [output_summary1_word_ner_line_list_sample_txt_file,
                        output_summary2_word_ner_line_list_sample_txt_file,
                        output_summary3_word_ner_line_list_sample_txt_file,
                        output_summary4_word_ner_line_list_sample_txt_file],
                       [output_summary1_word_ner_line_list_pickle_file, output_summary2_word_ner_line_list_pickle_file,
                        output_summary3_word_ner_line_list_pickle_file,
                        output_summary4_word_ner_line_list_pickle_file]):
            input_summary_batch_text_list = []
            with open(input_summary_file_path, 'r', encoding='utf8') as f:
                line_index = 0
                input_temp_list = []
                for line in f:
                    line_index += 1
                    input_temp_list += line.split() + ['NL_']
                    if line_index == lines_per_ner_application:
                        input_summary_batch_text_list.append(input_temp_list)
                        input_temp_list = []
                        line_index = 0
                if line_index > 0:
                    input_summary_batch_text_list.append(input_temp_list)
                f.close()  # NOTE(review): redundant — the with-block already closes f
            print('Summary data have been loaded on batch list')
            t1 = time.time()
            line_index = 0
            summary_word_ner_per_line_list = []
            for batch_text_list in input_summary_batch_text_list:
                line_index += lines_per_ner_application
                word_ner_list = self.stanford_ner(batch_text_list)
                line_word_ner_list = []
                for (word, ner) in word_ner_list:
                    if word != 'NL_':
                        line_word_ner_list.append((word, ner))
                        if ner in ner_tags:
                            try:
                                new_freq = word_ner_freq_dict[(word, ner)] + 1
                                word_ner_freq_dict[(word, ner)] = new_freq
                            except KeyError:
                                word_ner_freq_dict[(word, ner)] = 1
                    else:
                        summary_word_ner_per_line_list.append(line_word_ner_list)
                        line_word_ner_list = []
                if line_index % print_per_line == 0:
                    t = time.time()
                    print('{} line, Time (overall and per {} lines): {} & {:.2f}'.format(
                        line_index, 1000, datetime.timedelta(seconds=t - t0),
                        (t - t1) * 1000 / print_per_line))
                    t1 = t
            print('Summary ner tags have been obtained.')
            del input_summary_batch_text_list
            with open(output_summary_word_ner_line_list_sample_txt_file, 'w', encoding='utf8') as f:
                index = 0
                for word_pos_line_list in summary_word_ner_per_line_list:
                    f.write('{}\n'.format(word_pos_line_list))
                    index += 1
                    if index == lines_of_sample_files:
                        break
            with open(output_summary_word_ner_line_list_pickle_file, 'wb') as f:
                pickle.dump(summary_word_ner_per_line_list, f)
            print('Summary files have been created:\n\t{}\n\t{}'.format(
                output_summary_word_ner_line_list_sample_txt_file, output_summary_word_ner_line_list_pickle_file))
            del summary_word_ner_per_line_list
        # Dump the combined named-entity vocabulary, most frequent first.
        word_pos_freq_list = []
        for k, v in word_ner_freq_dict.items():
            word_pos_freq_list.append((k, v))
        word_pos_freq_list = sorted(word_pos_freq_list, key=lambda tup: -tup[1])
        with open(output_word_ner_freq_dict_txt_file, 'w', encoding='utf8') as f:
            for ((w, p), freq) in word_pos_freq_list:
                f.write('{} {} {}\n'.format(w, p, freq))
        with open(output_word_ner_freq_dict_pickle_file, 'wb') as f:
            pickle.dump(word_ner_freq_dict, f)
        print('Word-pos-freq files have been created:\n\t{}\n\t{}'.format(
            output_word_ner_freq_dict_txt_file, output_word_ner_freq_dict_pickle_file))
    def pos_tagging_of_duc_dataset_and_vocab_pos_frequent(self,
                                                          input_article_file_path,
                                                          input_summary1_file_path,
                                                          input_summary2_file_path,
                                                          input_summary3_file_path,
                                                          input_summary4_file_path,
                                                          output_article_word_pos_line_list_sample_txt_file,
                                                          output_summary1_word_pos_line_list_sample_txt_file,
                                                          output_summary2_word_pos_line_list_sample_txt_file,
                                                          output_summary3_word_pos_line_list_sample_txt_file,
                                                          output_summary4_word_pos_line_list_sample_txt_file,
                                                          output_article_word_pos_line_list_pickle_file,
                                                          output_summary1_word_pos_line_list_pickle_file,
                                                          output_summary2_word_pos_line_list_pickle_file,
                                                          output_summary3_word_pos_line_list_pickle_file,
                                                          output_summary4_word_pos_line_list_pickle_file,
                                                          output_word_pos_freq_dict_txt_file,
                                                          output_word_pos_freq_dict_pickle_file,
                                                          lines_per_pos_application, print_per_line=50,
                                                          lines_of_sample_files=1000):
        """POS-tag a DUC article file and its four summary files; build a
        (word, pos) frequency vocabulary.

        Same batching scheme as the NER variant: ``lines_per_pos_application``
        input lines per ``self.wordnet_pos_tag`` call, with the 'NL_' sentinel
        marking original line breaks.  Unlike the NER variant, every
        (word, pos) pair is counted (no tag filter).  For each file a sample
        txt (first ``lines_of_sample_files`` lines) and a full pickle are
        written; the combined frequency dict is written last as txt (sorted
        by decreasing frequency) and pickle.  Returns None.
        """
        t0 = time.time()
        input_article_batch_text_list = []
        print('Reading files:\n\t{}\n\t{}\n\t{}\n\t{}\n\t{}'.format(input_article_file_path, input_summary1_file_path,
                                                                    input_summary2_file_path, input_summary3_file_path,
                                                                    input_summary4_file_path))
        # Load the article into batches; 'NL_' marks original line breaks.
        with open(input_article_file_path, 'r', encoding='utf8') as f:
            line_index = 0
            input_temp_list = []
            for line in f:
                line_index += 1
                input_temp_list += line.split() + ['NL_']
                if line_index == lines_per_pos_application:
                    input_article_batch_text_list.append(input_temp_list)
                    input_temp_list = []
                    line_index = 0
            # Keep the trailing partial batch, if any.
            if line_index > 0:
                input_article_batch_text_list.append(input_temp_list)
            f.close()  # NOTE(review): redundant — the with-block already closes f
        print('Article data have been loaded to batch list')
        word_pos_freq_dict = dict()
        article_word_pos_per_line_list = []
        t1 = time.time()
        line_index = 0
        for batch_text_list in input_article_batch_text_list:
            line_index += lines_per_pos_application
            word_pos_list = self.wordnet_pos_tag(batch_text_list)
            line_word_pos_list = []
            for (word, pos) in word_pos_list:
                if word != 'NL_':
                    line_word_pos_list.append((word, pos))
                    # EAFP increment of the (word, pos) frequency.
                    try:
                        new_freq = word_pos_freq_dict[(word, pos)] + 1
                        word_pos_freq_dict[(word, pos)] = new_freq
                    except KeyError:
                        word_pos_freq_dict[(word, pos)] = 1
                else:
                    # Line-break sentinel reached: close off the current line.
                    article_word_pos_per_line_list.append(line_word_pos_list)
                    line_word_pos_list = []
            if line_index % print_per_line == 0:
                t = time.time()
                print('{} line, Time (overall and per {} lines): {} & {:.2f}'.format(
                    line_index, 1000, datetime.timedelta(seconds=t - t0),
                    (t - t1) * 1000 / print_per_line))
                t1 = t
        del input_article_batch_text_list  # free memory before writing outputs
        print('Article pos tags have been obtained.')
        # Sample txt holds at most lines_of_sample_files lines.
        with open(output_article_word_pos_line_list_sample_txt_file, 'w', encoding='utf8') as f:
            index = 0
            for word_pos_line_list in article_word_pos_per_line_list:
                f.write('{}\n'.format(word_pos_line_list))
                index += 1
                if index == lines_of_sample_files:
                    break
        with open(output_article_word_pos_line_list_pickle_file, 'wb') as f:
            pickle.dump(article_word_pos_per_line_list, f)
        del article_word_pos_per_line_list
        print('Article files have been created:\n\t{}\n\t{}'.format(
            output_article_word_pos_line_list_sample_txt_file, output_article_word_pos_line_list_pickle_file))
        # Repeat the same pipeline for each of the four summary files.
        for summary, output_summary_word_pos_line_list_sample_txt_file, output_summary_word_pos_line_list_pickle_file \
                in zip([input_summary1_file_path, input_summary2_file_path,
                        input_summary3_file_path, input_summary4_file_path],
                       [output_summary1_word_pos_line_list_sample_txt_file,
                        output_summary2_word_pos_line_list_sample_txt_file,
                        output_summary3_word_pos_line_list_sample_txt_file,
                        output_summary4_word_pos_line_list_sample_txt_file],
                       [output_summary1_word_pos_line_list_pickle_file, output_summary2_word_pos_line_list_pickle_file,
                        output_summary3_word_pos_line_list_pickle_file,
                        output_summary4_word_pos_line_list_pickle_file]):
            input_summary_batch_text_list = []
            with open(summary, 'r', encoding='utf8') as f:
                line_index = 0
                input_temp_list = []
                for line in f:
                    line_index += 1
                    input_temp_list += line.split() + ['NL_']
                    if line_index == lines_per_pos_application:
                        input_summary_batch_text_list.append(input_temp_list)
                        input_temp_list = []
                        line_index = 0
                if line_index > 0:
                    input_summary_batch_text_list.append(input_temp_list)
                f.close()  # NOTE(review): redundant — the with-block already closes f
            print('Summary data have been loaded to batch list')
            t1 = time.time()
            line_index = 0
            summary_word_pos_per_line_list = []
            for batch_text_list in input_summary_batch_text_list:
                line_index += lines_per_pos_application
                word_pos_list = self.wordnet_pos_tag(batch_text_list)
                line_word_pos_list = []
                for (word, pos) in word_pos_list:
                    if word != 'NL_':
                        line_word_pos_list.append((word, pos))
                        try:
                            new_freq = word_pos_freq_dict[(word, pos)] + 1
                            word_pos_freq_dict[(word, pos)] = new_freq
                        except KeyError:
                            word_pos_freq_dict[(word, pos)] = 1
                    else:
                        summary_word_pos_per_line_list.append(line_word_pos_list)
                        line_word_pos_list = []
                if line_index % print_per_line == 0:
                    t = time.time()
                    print('{} line, Time (overall and per {} lines): {} & {:.2f}'.format(
                        line_index, 1000, datetime.timedelta(seconds=t - t0),
                        (t - t1) * 1000 / print_per_line))
                    t1 = t
            print('Summary pos tags have been obtained.')
            del input_summary_batch_text_list
            with open(output_summary_word_pos_line_list_sample_txt_file, 'w', encoding='utf8') as f:
                index = 0
                for word_pos_line_list in summary_word_pos_per_line_list:
                    f.write('{}\n'.format(word_pos_line_list))
                    index += 1
                    if index == lines_of_sample_files:
                        break
            with open(output_summary_word_pos_line_list_pickle_file, 'wb') as f:
                pickle.dump(summary_word_pos_per_line_list, f)
            print('Summary files have been created:\n\t{}\n\t{}'.format(
                output_summary_word_pos_line_list_sample_txt_file, output_summary_word_pos_line_list_pickle_file))
            del summary_word_pos_per_line_list
        # Dump the combined (word, pos) vocabulary, most frequent first.
        word_pos_freq_list = []
        for k, v in word_pos_freq_dict.items():
            word_pos_freq_list.append((k, v))
        word_pos_freq_list = sorted(word_pos_freq_list, key=lambda tup: -tup[1])
        with open(output_word_pos_freq_dict_txt_file, 'w', encoding='utf8') as f:
            for ((w, p), freq) in word_pos_freq_list:
                f.write('{} {} {}\n'.format(w, p, freq))
        with open(output_word_pos_freq_dict_pickle_file, 'wb') as f:
            pickle.dump(word_pos_freq_dict, f)
        print('Word-pos-freq files have been created:\n\t{}\n\t{}'.format(
            output_word_pos_freq_dict_txt_file, output_word_pos_freq_dict_pickle_file))
    def pos_tagging_of_dataset_and_vocabulary_of_words_pos_frequent(self,
                                                                    input_article_file_path,
                                                                    input_summary_file_path,
                                                                    output_article_word_pos_line_list_sample_txt_file,
                                                                    output_summary_word_pos_line_list_sample_txt_file,
                                                                    output_article_word_pos_line_list_pickle_file,
                                                                    output_summary_word_pos_line_list_pickle_file,
                                                                    output_word_pos_freq_dict_txt_file,
                                                                    output_word_pos_freq_dict_pickle_file,
                                                                    lines_per_pos_application, print_per_line=100,
                                                                    lines_of_sample_files=1000):
        """POS-tag an article file and one summary file; build a (word, pos)
        frequency vocabulary.

        Single-summary variant of the DUC version above: same batching with
        the 'NL_' line-break sentinel, ``self.wordnet_pos_tag`` applied once
        per batch of ``lines_per_pos_application`` lines, sample txt + full
        pickle written per file, and a combined frequency dict written last
        as txt (sorted by decreasing frequency) and pickle.  Returns None.
        """
        t0 = time.time()
        input_article_batch_text_list = []
        input_summary_batch_text_list = []
        print('Reading files:\n\t{}\n\t{}'.format(input_article_file_path, input_summary_file_path))
        # Load the article into batches; 'NL_' marks original line breaks.
        with open(input_article_file_path, 'r', encoding='utf8') as f:
            line_index = 0
            input_temp_list = []
            for line in f:
                line_index += 1
                input_temp_list += line.split() + ['NL_']
                if line_index == lines_per_pos_application:
                    input_article_batch_text_list.append(input_temp_list)
                    input_temp_list = []
                    line_index = 0
            # Keep the trailing partial batch, if any.
            if line_index > 0:
                input_article_batch_text_list.append(input_temp_list)
            f.close()  # NOTE(review): redundant — the with-block already closes f
        print('Article data have been loaded to batch list')
        word_pos_freq_dict = dict()
        article_word_pos_per_line_list = []
        t1 = time.time()
        line_index = 0
        for batch_text_list in input_article_batch_text_list:
            line_index += lines_per_pos_application
            word_pos_list = self.wordnet_pos_tag(batch_text_list)
            line_word_pos_list = []
            for (word, pos) in word_pos_list:
                if word != 'NL_':
                    line_word_pos_list.append((word, pos))
                    # EAFP increment of the (word, pos) frequency.
                    try:
                        new_freq = word_pos_freq_dict[(word, pos)] + 1
                        word_pos_freq_dict[(word, pos)] = new_freq
                    except KeyError:
                        word_pos_freq_dict[(word, pos)] = 1
                else:
                    # Line-break sentinel reached: close off the current line.
                    article_word_pos_per_line_list.append(line_word_pos_list)
                    line_word_pos_list = []
            if line_index % print_per_line == 0:
                t = time.time()
                print('{} line, Time (overall and per {} lines): {} & {:.2f}'.format(
                    line_index, 1000, datetime.timedelta(seconds=t - t0),
                    (t - t1) * 1000 / print_per_line))
                t1 = t
        del input_article_batch_text_list  # free memory before writing outputs
        print('Article pos tags have been obtained.')
        # Sample txt holds at most lines_of_sample_files lines.
        with open(output_article_word_pos_line_list_sample_txt_file, 'w', encoding='utf8') as f:
            index = 0
            for word_pos_line_list in article_word_pos_per_line_list:
                f.write('{}\n'.format(word_pos_line_list))
                index += 1
                if index == lines_of_sample_files:
                    break
        with open(output_article_word_pos_line_list_pickle_file, 'wb') as f:
            pickle.dump(article_word_pos_per_line_list, f)
        del article_word_pos_per_line_list
        print('Article files have been created:\n\t{}\n\t{}'.format(
            output_article_word_pos_line_list_sample_txt_file, output_article_word_pos_line_list_pickle_file))
        # Same pipeline for the summary file.
        with open(input_summary_file_path, 'r', encoding='utf8') as f:
            line_index = 0
            input_temp_list = []
            for line in f:
                line_index += 1
                input_temp_list += line.split() + ['NL_']
                if line_index == lines_per_pos_application:
                    input_summary_batch_text_list.append(input_temp_list)
                    input_temp_list = []
                    line_index = 0
            if line_index > 0:
                input_summary_batch_text_list.append(input_temp_list)
            f.close()  # NOTE(review): redundant — the with-block already closes f
        print('Summary data have been loaded to batch list')
        t1 = time.time()
        line_index = 0
        summary_word_pos_per_line_list = []
        for batch_text_list in input_summary_batch_text_list:
            line_index += lines_per_pos_application
            word_pos_list = self.wordnet_pos_tag(batch_text_list)
            line_word_pos_list = []
            for (word, pos) in word_pos_list:
                if word != 'NL_':
                    line_word_pos_list.append((word, pos))
                    try:
                        new_freq = word_pos_freq_dict[(word, pos)] + 1
                        word_pos_freq_dict[(word, pos)] = new_freq
                    except KeyError:
                        word_pos_freq_dict[(word, pos)] = 1
                else:
                    summary_word_pos_per_line_list.append(line_word_pos_list)
                    line_word_pos_list = []
            if line_index % print_per_line == 0:
                t = time.time()
                print('{} line, Time (overall and per {} lines): {} & {:.2f}'.format(
                    line_index, 1000, datetime.timedelta(seconds=t - t0),
                    (t - t1) * 1000 / print_per_line))
                t1 = t
        print('Summary pos tags have been obtained.')
        del input_summary_batch_text_list
        with open(output_summary_word_pos_line_list_sample_txt_file, 'w', encoding='utf8') as f:
            index = 0
            for word_pos_line_list in summary_word_pos_per_line_list:
                f.write('{}\n'.format(word_pos_line_list))
                index += 1
                if index == lines_of_sample_files:
                    break
        with open(output_summary_word_pos_line_list_pickle_file, 'wb') as f:
            pickle.dump(summary_word_pos_per_line_list, f)
        print('Summary files have been created:\n\t{}\n\t{}'.format(
            output_summary_word_pos_line_list_sample_txt_file, output_summary_word_pos_line_list_pickle_file))
        del summary_word_pos_per_line_list
        # Dump the combined (word, pos) vocabulary, most frequent first.
        word_pos_freq_list = []
        for k, v in word_pos_freq_dict.items():
            word_pos_freq_list.append((k, v))
        word_pos_freq_list = sorted(word_pos_freq_list, key=lambda tup: -tup[1])
        with open(output_word_pos_freq_dict_txt_file, 'w', encoding='utf8') as f:
            for ((w, p), freq) in word_pos_freq_list:
                f.write('{} {} {}\n'.format(w, p, freq))
        with open(output_word_pos_freq_dict_pickle_file, 'wb') as f:
            pickle.dump(word_pos_freq_dict, f)
        print('Word-pos-freq files have been created:\n\t{}\n\t{}'.format(
            output_word_pos_freq_dict_txt_file, output_word_pos_freq_dict_pickle_file))
    # Builds a dictionary of hypernym paths for the dataset's noun vocabulary
    # and rewrites the dataset with deep nouns replaced by hypernyms.
    def convert_dataset_with_hyperonyms(self, input_article_file_path, input_summary_file_path,
                                        output_article_file_path, output_summary_file_path,
                                        output_hypernyms_dict_pickle_file_path, output_hypernyms_dict_txt_file_path,
                                        print_per_line=2, max_depth=6,
                                        lines_per_pos_application=2000):
        """Rewrite an article/summary pair, replacing deep WordNet nouns with
        shallower hypernyms.

        Pipeline: (1) read both files into 'NL_'-sentinel batches of
        ``lines_per_pos_application`` lines; (2) POS-tag each batch with
        ``self.wordnet_pos_n_tag`` and collect every non-stopword noun into a
        vocabulary set; (3) for each noun, build its (word, depth) hypernym
        path via the synset helpers and dump the resulting dict to txt and
        pickle; (4) rewrite both files token by token — a noun whose depth
        exceeds ``max_depth`` is replaced by the first path entry at depth
        <= ``max_depth``, written with a trailing underscore, while all other
        tokens pass through unchanged ('NL_' becomes a newline).  Replacement
        counts are printed at the end.  Returns None.
        """
        t0 = time.time()
        # NOTE(review): general_hypernyms is defined but not referenced in
        # this method (the NER variant defines the same list).
        general_hypernyms = ['abstraction', 'entity', 'attribute', 'whole', 'physical',
                             'entity', 'physical_entity', 'matter', 'object', 'relation', 'natural_object',
                             'psychological_feature']
        stopword_list = nltk.corpus.stopwords.words('english')
        general_categories = []  # ['PERSON_', 'LOCATION_', 'ORGANIZATION_']
        except_words = stopword_list + general_categories
        word_set = set()
        article_pos_list = []
        summary_pos_list = []
        print('Building dataset with hypernyms...')
        print('Input files:\n\t{}\n\t{}'.format(input_article_file_path, input_summary_file_path))
        print('Building dictionary...')
        input_article_batch_text_list = []
        input_summary_batch_text_list = []
        # Load the article into batches; 'NL_' marks original line breaks.
        with open(input_article_file_path, 'r', encoding='utf8') as f:
            line_index = 0
            input_temp_list = []
            for line in f:
                line_index += 1
                input_temp_list += line.split() + ['NL_']
                if line_index == lines_per_pos_application:
                    input_article_batch_text_list.append(input_temp_list)
                    input_temp_list = []
                    line_index = 0
            # Keep the trailing partial batch, if any.
            if line_index > 0:
                input_article_batch_text_list.append(input_temp_list)
            f.close()  # NOTE(review): redundant — the with-block already closes f
        with open(input_summary_file_path, 'r', encoding='utf8') as f:
            line_index = 0
            input_temp_list = []
            for line in f:
                line_index += 1
                input_temp_list += line.split() + ['NL_']
                if line_index == lines_per_pos_application:
                    input_summary_batch_text_list.append(input_temp_list)
                    input_temp_list = []
                    line_index = 0
            if line_index > 0:
                input_summary_batch_text_list.append(input_temp_list)
            f.close()  # NOTE(review): redundant — the with-block already closes f
        count_words = 0
        # with open(input_article_file_path, 'r', encoding='utf8') as f:
        t1 = time.time()
        line_index = 0
        # Collect the noun vocabulary of the article (POS results are kept
        # for the rewrite pass below).
        for batch_text_list in input_article_batch_text_list:
            line_index += lines_per_pos_application
            pos_list = self.wordnet_pos_n_tag(batch_text_list)
            article_pos_list.append(pos_list)
            for (word, pos) in pos_list:
                if pos == 'n' and word not in except_words and word != 'NL_':
                    word_set.add(word)
                    count_words += 1
            if line_index % print_per_line == 0:
                t = time.time()
                print('{} line, Time (overall and per {} lines): {} & {:.2f}'.format(
                    line_index, 1000, datetime.timedelta(seconds=t - t0),
                    (t - t1) * 1000 / print_per_line))
                t1 = t
        del input_article_batch_text_list
        print('Article vocabulary has been loaded.')
        t1 = time.time()
        line_index = 0
        # Same vocabulary collection for the summary.
        for batch_text_list in input_summary_batch_text_list:
            line_index += lines_per_pos_application
            pos_list = self.wordnet_pos_n_tag(batch_text_list)
            summary_pos_list.append(pos_list)
            for (word, pos) in pos_list:
                if pos == 'n' and word not in except_words and word != 'NL_':
                    word_set.add(word)
                    count_words += 1
            if line_index % print_per_line == 0:
                t = time.time()
                print('{} line, Time (overall and per {} lines): {} & {:.2f}'.format(
                    line_index, 1000, datetime.timedelta(seconds=t - t0),
                    (t - t1) * 1000 / print_per_line))
                t1 = t
        del input_summary_batch_text_list
        print('Summary vocabulary has been loaded.')
        print('Dictionary has been built (count_words: {}).'.format(count_words))
        print('Extracting hypernyms...')
        # token_lemma -> [(word, depth), ...]; assumed sorted by decreasing
        # depth by syncet_sort_accornding_max_depth — TODO confirm.
        word_hypernym_path_dict = dict()
        lemmatizer = nltk.stem.WordNetLemmatizer()
        for token_ in word_set:
            token_lemma = lemmatizer.lemmatize(token_, pos='n')
            synset = self.make_synset(token_lemma, category='n')
            if synset is not None:
                synset.max_depth()
                merged_synset_list = self.merge_lists(synset.hypernym_paths())
                sorted_synsets = self.syncet_sort_accornding_max_depth(merged_synset_list)
                word_depth_list = self.word_depth_of_synsents(sorted_synsets)
                # Ensure the lemma itself heads its own path.
                if word_depth_list[0][0] != token_lemma:
                    word_depth_list = [(token_lemma, word_depth_list[0][1] + 1)] + word_depth_list
                word_hypernym_path_dict[token_lemma] = word_depth_list
        del word_set
        ###############
        with open(output_hypernyms_dict_txt_file_path, 'w', encoding='utf8') as f:
            for k, v in word_hypernym_path_dict.items():
                f.write('{} {}\n'.format(k, v))
        with open(output_hypernyms_dict_pickle_file_path, 'wb') as f:
            pickle.dump(word_hypernym_path_dict, f)
        print('Hypernyms have been written to files:\n\t{}\n\t{}'.format(
            output_hypernyms_dict_pickle_file_path, output_hypernyms_dict_txt_file_path))
        # hypernym -> (replacement count, list of replaced words), for the
        # final report.
        hypernym_changed_words_list_dict = dict()
        print('writing article file...')
        t1 = time.time()
        # depth_greater_than = min_depth - 1
        with open(output_article_file_path, 'w', encoding='utf8') as f:
            line_index = 0
            for pos_list in article_pos_list:
                line_index += lines_per_pos_application
                for (word, pos) in pos_list:
                    if word == 'NL_':
                        f.write('\n')
                    elif pos == 'n' and word not in except_words and word != None:
                        try:
                            hypernyms_path_list = word_hypernym_path_dict[word]
                            # print(hypernyms_path_list)
                            # hypernym_token = hypernyms_path_list[hypernym_offset][0]
                            depth = hypernyms_path_list[0][1]
                            if depth > max_depth:
                                # Replace with the first (deepest allowed)
                                # hypernym at depth <= max_depth.
                                for el in hypernyms_path_list:
                                    if el[1] <= max_depth:
                                        # print(el[2])
                                        f.write(el[0] + '_ ')
                                        if hypernym_changed_words_list_dict.get(el[0], None):
                                            new_list = hypernym_changed_words_list_dict[el[0]][1] + [word]
                                            new_freq = hypernym_changed_words_list_dict[el[0]][0] + 1
                                            hypernym_changed_words_list_dict[el[0]] = (new_freq, new_list)
                                        else:
                                            hypernym_changed_words_list_dict[el[0]] = (1, [word])
                                        break
                            else:
                                # Shallow enough already: keep the word.
                                f.write(word + ' ')
                        except KeyError:
                            f.write(word + ' ')
                        except IndexError:
                            f.write(word + ' ')
                    else:
                        f.write(word + ' ')
                # f.write('\n')
                if line_index % print_per_line == 0:
                    t = time.time()
                    print('{} line, Time (overall and per {} lines): {} & {:.2f}'.format(
                        line_index, 1000, datetime.timedelta(seconds=t - t0),
                        (t - t1) * 1000 / print_per_line))
                    t1 = t
        del article_pos_list
        print('writing summary file...')
        t1 = time.time()
        # Same rewrite for the summary.
        with open(output_summary_file_path, 'w', encoding='utf8') as f:
            line_index = 0
            for pos_list in summary_pos_list:
                line_index += lines_per_pos_application
                for (word, pos) in pos_list:
                    if word == 'NL_':
                        f.write('\n')
                    elif pos == 'n' and word not in except_words and word != None:
                        try:
                            hypernyms_path_list = word_hypernym_path_dict[word]
                            # hypernym_token = hypernyms_path_list[hypernym_offset][0]
                            depth = hypernyms_path_list[0][1]
                            if depth > max_depth:
                                for el in hypernyms_path_list:
                                    if el[1] <= max_depth:
                                        f.write(el[0] + '_ ')
                                        if hypernym_changed_words_list_dict.get(el[0], None):
                                            new_list = hypernym_changed_words_list_dict[el[0]][1] + [word]
                                            new_freq = hypernym_changed_words_list_dict[el[0]][0] + 1
                                            hypernym_changed_words_list_dict[el[0]] = (new_freq, new_list)
                                        else:
                                            hypernym_changed_words_list_dict[el[0]] = (1, [word])
                                        break
                            else:
                                f.write(word + ' ')
                        except KeyError:
                            f.write(word + ' ')
                        except IndexError:
                            f.write(word + ' ')
                    else:
                        f.write(word + ' ')
                # f.write('\n')
                if line_index % print_per_line == 0:
                    t = time.time()
                    print('{} line, Time (overall and per {} lines): {} & {:.2f}'.format(
                        line_index, 1000, datetime.timedelta(seconds=t - t0),
                        (t - t1) * 1000 / print_per_line))
                    t1 = t
        print('Hypernyms and the words that they have replaced:')
        for k, v in hypernym_changed_words_list_dict.items():
            print(k, v[0], v[1])
        print('Output files:\n\t{}\n\t{}'.format(output_article_file_path, output_summary_file_path))
        # return word_hypernym_path_dict
    # NER-aware variant: builds a dictionary of hypernym paths for the noun
    # vocabulary and rewrites the dataset, leaving NER placeholder tokens
    # (PERSON_/LOCATION_/ORGANIZATION_) untouched.
    def convert_dataset_with_ner_and_hyperonyms(self, input_article_file_path, input_summary_file_path,
                                                output_article_file_path, output_summary_file_path,
                                                output_hypernyms_dict_pickle_file_path,
                                                output_hypernyms_dict_txt_file_path,
                                                print_per_line=2, hypernym_offset=1, min_depth=5, max_depth=6,
                                                lines_per_pos_application=2000):
        """Rewrite a NER-preprocessed article/summary pair, replacing deep
        WordNet nouns with shallower hypernyms.

        Identical pipeline to ``convert_dataset_with_hyperonyms`` except that
        the NER placeholder tokens 'PERSON_' / 'LOCATION_' / 'ORGANIZATION_'
        are added to the exception list, so they are never treated as nouns
        and pass through unchanged.  See that method for the stage-by-stage
        description.  Returns None.

        NOTE(review): ``hypernym_offset`` and ``min_depth`` are accepted but
        not used in the body (min_depth only appears in a commented-out line).
        """
        t0 = time.time()
        # NOTE(review): general_hypernyms is defined but not referenced in
        # this method.
        general_hypernyms = ['abstraction', 'entity', 'attribute', 'whole', 'physical',
                             'entity', 'physical_entity', 'matter', 'object', 'relation', 'natural_object',
                             'psychological_feature']
        stopword_list = nltk.corpus.stopwords.words('english')
        general_categories = ['PERSON_', 'LOCATION_', 'ORGANIZATION_']
        except_words = stopword_list + general_categories
        word_set = set()
        article_pos_list = []
        summary_pos_list = []
        print('Building dataset with hypernyms...')
        print('Input files:\n\t{}\n\t{}'.format(input_article_file_path, input_summary_file_path))
        print('Building dictionary...')
        input_article_batch_text_list = []
        input_summary_batch_text_list = []
        # Load the article into batches; 'NL_' marks original line breaks.
        with open(input_article_file_path, 'r', encoding='utf8') as f:
            line_index = 0
            input_temp_list = []
            for line in f:
                line_index += 1
                input_temp_list += line.split() + ['NL_']
                if line_index == lines_per_pos_application:
                    input_article_batch_text_list.append(input_temp_list)
                    input_temp_list = []
                    line_index = 0
            # Keep the trailing partial batch, if any.
            if line_index > 0:
                input_article_batch_text_list.append(input_temp_list)
            f.close()  # NOTE(review): redundant — the with-block already closes f
        with open(input_summary_file_path, 'r', encoding='utf8') as f:
            line_index = 0
            input_temp_list = []
            for line in f:
                line_index += 1
                input_temp_list += line.split() + ['NL_']
                if line_index == lines_per_pos_application:
                    input_summary_batch_text_list.append(input_temp_list)
                    input_temp_list = []
                    line_index = 0
            if line_index > 0:
                input_summary_batch_text_list.append(input_temp_list)
            f.close()  # NOTE(review): redundant — the with-block already closes f
        count_words = 0
        # with open(input_article_file_path, 'r', encoding='utf8') as f:
        t1 = time.time()
        line_index = 0
        # Collect the noun vocabulary of the article (POS results are kept
        # for the rewrite pass below).
        for batch_text_list in input_article_batch_text_list:
            line_index += lines_per_pos_application
            pos_list = self.wordnet_pos_n_tag(batch_text_list)
            article_pos_list.append(pos_list)
            for (word, pos) in pos_list:
                if pos == 'n' and word not in except_words and word != 'NL_':
                    word_set.add(word)
                    count_words += 1
            if line_index % print_per_line == 0:
                t = time.time()
                print('{} line, Time (overall and per {} lines): {} & {:.2f}'.format(
                    line_index, 1000, datetime.timedelta(seconds=t - t0),
                    (t - t1) * 1000 / print_per_line))
                t1 = t
        del input_article_batch_text_list
        print('Article vocabulary has been loaded.')
        t1 = time.time()
        line_index = 0
        # Same vocabulary collection for the summary.
        for batch_text_list in input_summary_batch_text_list:
            line_index += lines_per_pos_application
            pos_list = self.wordnet_pos_n_tag(batch_text_list)
            summary_pos_list.append(pos_list)
            for (word, pos) in pos_list:
                if pos == 'n' and word not in except_words and word != 'NL_':
                    word_set.add(word)
                    count_words += 1
            if line_index % print_per_line == 0:
                t = time.time()
                print('{} line, Time (overall and per {} lines): {} & {:.2f}'.format(
                    line_index, 1000, datetime.timedelta(seconds=t - t0),
                    (t - t1) * 1000 / print_per_line))
                t1 = t
        del input_summary_batch_text_list
        print('Summary vocabulary has been loaded.')
        print('Dictionary has been built (count_words: {}).'.format(count_words))
        print('Extracting hypernyms...')
        # token_lemma -> [(word, depth), ...]; assumed sorted by decreasing
        # depth by syncet_sort_accornding_max_depth — TODO confirm.
        word_hypernym_path_dict = dict()
        lemmatizer = nltk.stem.WordNetLemmatizer()
        for token_ in word_set:
            token_lemma = lemmatizer.lemmatize(token_, pos='n')
            synset = self.make_synset(token_lemma, category='n')
            if synset is not None:
                synset.max_depth()
                merged_synset_list = self.merge_lists(synset.hypernym_paths())
                sorted_synsets = self.syncet_sort_accornding_max_depth(merged_synset_list)
                word_depth_list = self.word_depth_of_synsents(sorted_synsets)
                # Ensure the lemma itself heads its own path.
                if word_depth_list[0][0] != token_lemma:
                    word_depth_list = [(token_lemma, word_depth_list[0][1] + 1)] + word_depth_list
                word_hypernym_path_dict[token_lemma] = word_depth_list
        del word_set
        ###############
        with open(output_hypernyms_dict_txt_file_path, 'w', encoding='utf8') as f:
            for k, v in word_hypernym_path_dict.items():
                f.write('{} {}\n'.format(k, v))
        with open(output_hypernyms_dict_pickle_file_path, 'wb') as f:
            pickle.dump(word_hypernym_path_dict, f)
        print('Hypernyms have been written to files:\n\t{}\n\t{}'.format(
            output_hypernyms_dict_pickle_file_path, output_hypernyms_dict_txt_file_path))
        # hypernym -> (replacement count, list of replaced words), for the
        # final report.
        hypernym_changed_words_list_dict = dict()
        print('writing article file...')
        t1 = time.time()
        # depth_greater_than = min_depth - 1
        with open(output_article_file_path, 'w', encoding='utf8') as f:
            line_index = 0
            for pos_list in article_pos_list:
                line_index += lines_per_pos_application
                for (word, pos) in pos_list:
                    if word == 'NL_':
                        f.write('\n')
                    elif pos == 'n' and word not in except_words and word != None:
                        try:
                            hypernyms_path_list = word_hypernym_path_dict[word]
                            # print(hypernyms_path_list)
                            # hypernym_token = hypernyms_path_list[hypernym_offset][0]
                            depth = hypernyms_path_list[0][1]
                            if depth > max_depth:
                                # Replace with the first (deepest allowed)
                                # hypernym at depth <= max_depth.
                                for el in hypernyms_path_list:
                                    if el[1] <= max_depth:
                                        # print(el[2])
                                        f.write(el[0] + '_ ')
                                        if hypernym_changed_words_list_dict.get(el[0], None):
                                            new_list = hypernym_changed_words_list_dict[el[0]][1] + [word]
                                            new_freq = hypernym_changed_words_list_dict[el[0]][0] + 1
                                            hypernym_changed_words_list_dict[el[0]] = (new_freq, new_list)
                                        else:
                                            hypernym_changed_words_list_dict[el[0]] = (1, [word])
                                        break
                            else:
                                # Shallow enough already: keep the word.
                                f.write(word + ' ')
                        except KeyError:
                            f.write(word + ' ')
                        except IndexError:
                            f.write(word + ' ')
                    else:
                        f.write(word + ' ')
                # f.write('\n')
                if line_index % print_per_line == 0:
                    t = time.time()
                    print('{} line, Time (overall and per {} lines): {} & {:.2f}'.format(
                        line_index, 1000, datetime.timedelta(seconds=t - t0),
                        (t - t1) * 1000 / print_per_line))
                    t1 = t
        del article_pos_list
        print('writing summary file...')
        t1 = time.time()
        # Same rewrite for the summary.
        with open(output_summary_file_path, 'w', encoding='utf8') as f:
            line_index = 0
            for pos_list in summary_pos_list:
                line_index += lines_per_pos_application
                for (word, pos) in pos_list:
                    if word == 'NL_':
                        f.write('\n')
                    elif pos == 'n' and word not in except_words and word != None:
                        try:
                            hypernyms_path_list = word_hypernym_path_dict[word]
                            # hypernym_token = hypernyms_path_list[hypernym_offset][0]
                            depth = hypernyms_path_list[0][1]
                            if depth > max_depth:
                                for el in hypernyms_path_list:
                                    if el[1] <= max_depth:
                                        f.write(el[0] + '_ ')
                                        if hypernym_changed_words_list_dict.get(el[0], None):
                                            new_list = hypernym_changed_words_list_dict[el[0]][1] + [word]
                                            new_freq = hypernym_changed_words_list_dict[el[0]][0] + 1
                                            hypernym_changed_words_list_dict[el[0]] = (new_freq, new_list)
                                        else:
                                            hypernym_changed_words_list_dict[el[0]] = (1, [word])
                                        break
                            else:
                                f.write(word + ' ')
                        except KeyError:
                            f.write(word + ' ')
                        except IndexError:
                            f.write(word + ' ')
                    else:
                        f.write(word + ' ')
                # f.write('\n')
                if line_index % print_per_line == 0:
                    t = time.time()
                    print('{} line, Time (overall and per {} lines): {} & {:.2f}'.format(
                        line_index, 1000, datetime.timedelta(seconds=t - t0),
                        (t - t1) * 1000 / print_per_line))
                    t1 = t
        print('Hypernyms and the words that they have replaced:')
        for k, v in hypernym_changed_words_list_dict.items():
            print(k, v[0], v[1])
        print('Output files:\n\t{}\n\t{}'.format(output_article_file_path, output_summary_file_path))
        # return word_hypernym_path_dict
@staticmethod
# it returns a list of tuples: (word, pos)
def wordnet_pos_n_tag(text_list):
# wordnet pos: (ADJ, ADJ_SAT, ADV, NOUN, VERB) = ('a', 's', 'r', 'n', 'v')
pos_tag_list = nltk.tag.pos_tag(text_list)
word_pos_list = []
for (w, pos) in pos_tag_list:
wordnet_pos = 'other'
if pos.startswith('N'):
wordnet_pos = 'n' # nltk.corpus.wordnet.NOUN
word_pos_list.append((w, wordnet_pos))
return word_pos_list
def min_common_hyperonym_of_vocabulary(self, input_file_path='path/to/file',
                                       output_dictionary_pickle_file="", output_dict_txt_file="",
                                       time_of_pass=1):
    """Map each frequent noun of the input corpus to its most common lowest hypernym.

    Builds a noun-frequency dictionary from the whitespace-tokenized input file,
    then for every noun finds the lowest common hypernym it shares with every
    other noun (frequency-weighted) and keeps the most frequent one; nouns with
    no usable hypernym map to themselves.  Pairs where the chosen hypernym
    differs from the word are printed.

    NOTE(review): output_dictionary_pickle_file, output_dict_txt_file and
    time_of_pass are currently unused — kept for interface compatibility.
    """
    stopword_list = nltk.corpus.stopwords.words('english')
    general_categories = ['PERSON_', 'LOCATION_', 'ORGANIZATION_']
    # Hypernyms too generic to be useful replacements ('entity' was listed twice
    # in the original; the duplicate is dropped — membership tests are unchanged).
    general_hypernyms = ['abstraction', 'entity', 'attribute', 'whole', 'physical',
                         'physical_entity', 'matter', 'object', 'relation', 'natural_object']
    except_words = stopword_list + general_categories
    # (word, 'n') -> corpus frequency, nouns only
    word_pos_freq_dict = dict()
    print('Building dictionary')
    count_words = 0
    with open(input_file_path, 'r', encoding='utf8') as f:
        for line in f:
            # BUG FIX: wordnet_pos_tag returns a single list of (word, pos)
            # tuples; the original 'pos_list, _ = ...' unpacking raised
            # ValueError for any line that is not exactly two tokens long.
            pos_list = self.wordnet_pos_tag(line.split())
            for word, pos in pos_list:
                if pos == 'n' and word not in except_words:
                    word_pos_freq_dict[(word, pos)] = word_pos_freq_dict.get((word, pos), 0) + 1
                    count_words += 1
    print('dictionary is built. count_words: {}'.format(count_words))
    # Sort vocabulary by descending frequency.
    word_pos_list = sorted(word_pos_freq_dict.items(), key=lambda tup: -tup[1])
    del word_pos_freq_dict
    word_hypernym_dict = dict()
    for ((word1, pos1), freq1) in word_pos_list:
        hypernym_freq_dict = dict()
        try:
            # NOTE(review): make_synset returns None on WordNetError, which
            # would raise AttributeError below rather than WordNetError —
            # consider an explicit None check.
            synset1 = self.make_synset(word1)
            for ((word2, pos2), freq2) in word_pos_list:
                synset2 = self.make_synset(word2)
                common_hypernyms = synset1.lowest_common_hypernyms(synset2)
                if common_hypernyms != []:
                    for ch in common_hypernyms:
                        ch_word = self.synset_word(ch)
                        if ch_word not in general_hypernyms:
                            hypernym_freq_dict[ch_word] = hypernym_freq_dict.get(ch_word, 0) + freq2
                else:
                    word_hypernym_dict[word1] = word1
        except nltk.corpus.reader.wordnet.WordNetError:
            word_hypernym_dict[word1] = word1
        # Arg-max over candidate hypernym frequencies.
        max_freq = 0
        for k, v in hypernym_freq_dict.items():
            if v > max_freq:
                max_freq = v  # BUG FIX: max_freq was never updated, so the
                              # last positive-frequency hypernym always won.
                word_hypernym_dict[word1] = k
    for k, v in word_hypernym_dict.items():
        if k != v:
            print(k, v)
def convert_dataset_with_hypernyms(self, input_article_file_path, output_article_file_path,
                                   input_summary_file_path, output_summary_file_path,
                                   lines_per_ner_application=2500, print_per_lines=10000):
    """Run convert_text_with_hypernyms over the article pair, then the summary pair."""
    file_pairs = ((input_article_file_path, output_article_file_path),
                  (input_summary_file_path, output_summary_file_path))
    for src_path, dst_path in file_pairs:
        self.convert_text_with_hypernyms(src_path, dst_path,
                                         lines_per_ner_application=lines_per_ner_application,
                                         print_per_lines=print_per_lines)
def convert_dataset_with_ner(self, input_article_file_path, input_summary_file_path,
                             output_article_file_path, output_summary_file_path,
                             lines_per_ner_application=2500, print_per_lines=10000):
    """Run convert_text_with_ner over the article pair, then the summary pair."""
    file_pairs = ((input_article_file_path, output_article_file_path),
                  (input_summary_file_path, output_summary_file_path))
    for src_path, dst_path in file_pairs:
        self.convert_text_with_ner(src_path, dst_path,
                                   lines_per_ner_application=lines_per_ner_application,
                                   print_per_lines=print_per_lines)
def convert_text_with_hypernyms(self, input_file_path, output_file_path,
                                lines_per_ner_application=2500, print_per_lines=10000):
    """Rewrite a token file, replacing named entities with LOCATION_/PERSON_/ORGANIZATION_ tags.

    NOTE(review): despite the name, this method performs Named Entity
    Recognition (via self.stanford_ner), not hypernym substitution — it is a
    near-duplicate of convert_text_with_ner.  Kept as-is for callers; consider
    consolidating.  'NL_' sentinel tokens preserve original line breaks.
    """
    print('Named Entity Recognition and convert the dataset')
    print('Input file: {}\n'.format(input_file_path))
    # Load the input as batches of tokens so the NER tagger is invoked once
    # per lines_per_ner_application lines instead of once per line.
    input_batch_text_list = []
    with open(input_file_path, 'r', encoding='utf8') as f:
        line_index = 0
        input_temp_list = []
        for line in f:
            line_index += 1
            input_temp_list += line.split() + ['NL_']
            if line_index == lines_per_ner_application:
                input_batch_text_list.append(input_temp_list)
                input_temp_list = []
                line_index = 0
        if line_index > 0:
            input_batch_text_list.append(input_temp_list)
    # BUG FIX: removed the redundant f.close() — the with-block already closes f.
    print('Input data loaded.')
    ner_list = []
    lines_index = 0
    t0 = time.time()
    for el_list in input_batch_text_list:
        ner_list += self.stanford_ner(el_list)
        lines_index += lines_per_ner_application
        if lines_index % print_per_lines == 0:
            dt = time.time() - t0
            print('NER: {} lines, Time (total and avg per 1000 lines) {} & {:.3f} sec,'.format(
                lines_index, datetime.timedelta(seconds=dt), dt * 1000 / lines_index))
    del input_batch_text_list
    print('NER have been run')
    ner_freq_dict = dict()
    ner_tag_list = ['LOCATION', 'PERSON', 'ORGANIZATION']
    previous_text = ''
    # Output file now opened with a context manager so it is closed on error too.
    with open(output_file_path, 'w', encoding='utf8') as output_file:
        for (token, ner) in ner_list:
            if token == 'NL_':
                output_file.write('\n')
                previous_text = ''
            elif ner in ner_tag_list:
                # Collapse runs of the same entity tag into a single tag token;
                # ner_freq_dict counts emitted tags (one per run).
                if ner != previous_text:
                    output_file.write(ner + '_ ')
                    previous_text = ner
                    ner_freq_dict[ner] = ner_freq_dict.get(ner, 0) + 1
            else:
                output_file.write(token + ' ')
                previous_text = token
    del ner_list
    for k, v in ner_freq_dict.items():
        print(k, v)
    print('Output file: {}\n'.format(output_file_path))
def convert_text_with_ner(self, input_file_path, output_file_path,
                          lines_per_ner_application=2500, print_per_lines=10000):
    """Rewrite a token file, replacing named entities with LOCATION_/PERSON_/ORGANIZATION_ tags.

    Unlike convert_text_with_hypernyms this builds the StanfordNERTagger once
    up front and reuses it for every batch.  'NL_' sentinel tokens preserve
    original line breaks.

    NOTE(review): unlike stanford_ner(), this path does not set JAVAHOME —
    presumably it is already set when this method is used; confirm.
    """
    print('Named Entity Recognition and convert the dataset')
    print('Input file: {}\n'.format(input_file_path))
    # Load the input as token batches (one NER invocation per batch).
    input_batch_text_list = []
    with open(input_file_path, 'r', encoding='utf8') as f:
        line_index = 0
        input_temp_list = []
        for line in f:
            line_index += 1
            input_temp_list += line.split() + ['NL_']
            if line_index == lines_per_ner_application:
                input_batch_text_list.append(input_temp_list)
                input_temp_list = []
                line_index = 0
        if line_index > 0:
            input_batch_text_list.append(input_temp_list)
    # BUG FIX: removed the redundant f.close() — the with-block already closes f.
    print('Input data loaded.')
    ner_list = []
    lines_index = 0
    t0 = time.time()
    stanford_ner_tagger_dir = "C:/Stanford_NLP_Tools/stanford-ner-2018-10-16/"
    # NOTE(review): the original listed the same 3-class model twice here
    # (stanford_ner() lists the 4-class model as the alternative); only
    # model[0] is ever used, so behavior is unchanged.
    model = ['english.all.3class.distsim.crf.ser.gz', 'english.all.3class.distsim.crf.ser.gz']
    ner = nltk.tag.stanford.StanfordNERTagger(
        stanford_ner_tagger_dir + 'classifiers/' + model[0],
        stanford_ner_tagger_dir + 'stanford-ner-3.9.2.jar')
    for el_list in input_batch_text_list:
        ner_list += ner.tag(el_list)
        lines_index += lines_per_ner_application
        if lines_index % print_per_lines == 0:
            dt = time.time() - t0
            print('NER: {} lines, Time (total and avg per 1000 lines) {} & {:.3f} sec,'.format(
                lines_index, datetime.timedelta(seconds=dt), dt * 1000 / lines_index))
    del input_batch_text_list
    print('NER have been run')
    ner_freq_dict = dict()
    ner_tag_list = ['LOCATION', 'PERSON', 'ORGANIZATION']
    previous_text = ''
    # Output file now opened with a context manager so it is closed on error too.
    with open(output_file_path, 'w', encoding='utf8') as output_file:
        for (token, ner) in ner_list:
            if token == 'NL_':
                output_file.write('\n')
                previous_text = ''
            elif ner in ner_tag_list:
                # Collapse runs of the same entity tag; count one per run.
                if ner != previous_text:
                    output_file.write(ner + '_ ')
                    previous_text = ner
                    ner_freq_dict[ner] = ner_freq_dict.get(ner, 0) + 1
            else:
                output_file.write(token + ' ')
                previous_text = token
    del ner_list
    for k, v in ner_freq_dict.items():
        print(k, v)
    print('Output file: {}\n'.format(output_file_path))
def stanford_ner(self, text_list):
    """Tag a token list with the Stanford 3-class NER model (local Windows install).

    Returns a list of (token, entity_tag) tuples from StanfordNERTagger.tag.
    """
    # http://www.nltk.org/api/nltk.tag.html#module-nltk.tag.stanford
    # https://nlp.stanford.edu/software/CRF-NER.html#Starting
    # BUG FIX: raw string — the original contained invalid escape sequences
    # ('\P', '\J', '\j'), which only work by CPython's lenient fallback and
    # emit DeprecationWarning; the raw string has the identical value.
    java_path = r'C:\Program Files (x86)\Java\jre1.8.0_201/bin/java.exe'
    os.environ['JAVAHOME'] = java_path
    stanford_ner_tagger_dir = "C:/Stanford_NLP_Tools/stanford-ner-2018-10-16/"
    # Only model[0] (the 3-class model) is used; the others are alternatives.
    model = ['english.all.3class.distsim.crf.ser.gz', 'english.conll.4class.distsim.crf.ser.gz',
             'english.all.3class.distsim.crf.ser.gz']
    ner = nltk.tag.stanford.StanfordNERTagger(
        stanford_ner_tagger_dir + 'classifiers/' + model[0],
        stanford_ner_tagger_dir + 'stanford-ner-3.9.2.jar')
    return ner.tag(text_list)
@staticmethod
def wordnet_pos_tag(text_list):
    """POS-tag tokens and translate Penn Treebank prefixes to WordNet categories.

    Prefix J -> 'a', V -> 'v', N -> 'n', R -> 'r'; any other Penn tag is kept
    unchanged.  Returns a list of (word, wordnet_pos) tuples.
    """
    # wordnet pos: (ADJ, ADJ_SAT, ADV, NOUN, VERB) = ('a', 's', 'r', 'n', 'v')
    prefix_map = (('J', 'a'), ('V', 'v'), ('N', 'n'), ('R', 'r'))
    word_pos_list = []
    for token, penn_tag in nltk.tag.pos_tag(text_list):
        mapped = penn_tag
        for prefix, wn_tag in prefix_map:
            if penn_tag.startswith(prefix):
                mapped = wn_tag
                break
        word_pos_list.append((token, mapped))
    return word_pos_list
def nltk_pos_of_sentence(self, sentence):
    """POS-tag a whitespace-split sentence with NLTK's default tagger."""
    tokens = sentence.split()
    return nltk.tag.pos_tag(tokens)
def stanford_pos(self, text_list):
    """Tag a token list with the Stanford bidirectional POS tagger (local install)."""
    # http://www.nltk.org/api/nltk.tag.html#module-nltk.tag.stanford
    # https://nlp.stanford.edu/software/tagger.shtml
    # java_path = 'C:\Program Files (x86)\Java\jre1.8.0_191/bin/java.exe'
    # os.environ['JAVAHOME'] = java_path
    tagger_dir = "C:/Stanford_NLP_Tools/stanford-postagger-full-2018-10-16/"
    tagger = nltk.tag.stanford.StanfordPOSTagger(
        tagger_dir + 'models/english-bidirectional-distsim.tagger',
        tagger_dir + 'stanford-postagger-3.9.2.jar')
    return tagger.tag(text_list)
def hyperonyms_paths(self, sentence, stopword_list):
    """Print the depth-sorted hypernym words for each content token of a sentence.

    Exploratory/debug helper: prints the sentence and, per non-stopword token,
    the (word, depth) pairs of its merged hypernym paths.  Returns None.

    NOTE(review): word_hyperonym_list is populated for stopwords only and never
    returned — apparently unfinished; confirm intent before relying on it.
    """
    print(sentence)
    word_hyperonym_list = []
    token_list = sentence.split()
    # BUG FIX: wordnet_pos_tag returns a single list of (word, pos) tuples;
    # the original 'word_pos_list, size = ...' unpacking raised ValueError.
    word_pos_list = self.wordnet_pos_tag(token_list)
    for (token, pos) in word_pos_list:
        # NOTE(review): wordnet_pos_tag never emits 'other_pos' (it passes
        # unknown Penn tags through unchanged), so this branch only triggers
        # for stopwords — confirm whether 'other_pos' was intended.
        if token in stopword_list or pos == 'other_pos':
            word_hyperonym_list.append([token])
        else:
            synset = self.make_synset(token, category=pos)
            synset.max_depth()
            merged_synset_list = self.merge_lists(synset.hypernym_paths())
            sorted_synsets = self.syncet_sort_accornding_max_depth(merged_synset_list)
            word_depth_list = self.word_depth_of_synsents(sorted_synsets)
            print(word_depth_list)
def word_depth_of_synsents(self, synset_depth_list):
    """Convert (synset, depth) pairs into (head-word, depth) pairs."""
    return [(self.synset_word(syn), depth) for (syn, depth) in synset_depth_list]
def merge_lists(self, list_of_list):
    """Flatten a list of lists into a list of distinct elements (order unspecified)."""
    merged_set = set()
    for sublist in list_of_list:
        merged_set.update(sublist)
    return list(merged_set)
def syncet_sort_accornding_max_depth(self, synsets_list):
    """Pair each synset with its max_depth() and return the pairs, deepest first.

    (Name kept verbatim — misspelled, but it is the public interface.)
    """
    paired = [(synset, synset.max_depth()) for synset in synsets_list]
    paired.sort(key=lambda pair: -pair[1])
    return paired
def hypernym_path(self, synset):
    """Return all hypernym paths of the synset (thin delegate to WordNet)."""
    paths = synset.hypernym_paths()
    return paths
@staticmethod
def make_synset(word, category='n', number='01'):
    """Look up the WordNet synset '<word>.<category>.<number>'.

    Returns None when WordNet has no such synset — callers must check for it.
    """
    synset_id = '{}.{}.{}'.format(word, category, number)
    try:
        return wordnet.synset(synset_id)
    except nltk.corpus.reader.wordnet.WordNetError:
        return None
def _recurse_all_hypernyms(self, synset, all_hypernyms):
synset_hypernyms = synset.hypernyms()
if synset_hypernyms:
all_hypernyms += synset_hypernyms
for hypernym in synset_hypernyms:
self._recurse_all_hypernyms(hypernym, all_hypernyms)
def all_hypernyms(self, synset):
    """Get the set of hypernyms of the hypernym of the synset etc.

    Nouns can have multiple hypernyms, so a depth-sorted list is not possible;
    the transitive closure is returned as an (unordered) set.
    """
    collected = []
    self._recurse_all_hypernyms(synset, collected)
    return set(collected)
def depth_of_synset(self, synset):
    # Unimplemented stub: always returns None.
    # TODO(review): implement (perhaps via synset.max_depth()) or remove.
    return
def _recurse_leaf_hyponyms(self, synset, leaf_hyponyms):
synset_hyponyms = synset.hyponyms()
if synset_hyponyms:
for hyponym in synset_hyponyms:
self._recurse_all_hyponyms(hyponym, leaf_hyponyms)
else:
leaf_hyponyms += synset
def leaf_hyponyms(self, synset):
    """Get the set of leaf nodes from the tree of hyponyms under the synset"""
    collected = []
    self._recurse_leaf_hyponyms(synset, collected)
    return set(collected)
def all_peers(self, synset):
    """Get the set of all peers of the synset (including the synset itself).

    When the synset has multiple hypernyms, its peers are the hyponyms of
    several parent synsets.
    """
    peer_list = []
    for parent in synset.hypernyms():
        peer_list.extend(parent.hyponyms())
    return set(peer_list)
def synset_synonyms(self, synset):
    """Get the synonyms (each lemma's synset) for the synset.

    BUG FIX: in NLTK 3.x, Synset.lemmas and Lemma.synset are methods, not
    attributes — the file already uses method-style WordNet calls such as
    synset.name() and synset.hypernym_paths() — so iterating the bound method
    'synset.lemmas' raised TypeError.  They are now called.
    """
    return set(lemma.synset() for lemma in synset.lemmas())
def synset_antonyms(self, synset):
    """Get the antonyms for [the first lemma of] the synset.

    BUG FIX: Synset.lemmas, Lemma.antonyms and Lemma.synset are methods in
    NLTK 3.x; the original indexed 'synset.lemmas[0]', which raises TypeError
    on a bound method.  They are now called.
    """
    return set(lemma.synset() for lemma in synset.lemmas()[0].antonyms())
def _recurse_all_hyponyms(self, synset, all_hyponyms):
synset_hyponyms = synset.hyponyms()
if synset_hyponyms:
all_hyponyms += synset_hyponyms
for hyponym in synset_hyponyms:
self._recurse_all_hyponyms(hyponym, all_hyponyms)
def all_hyponyms(self, synset):
    """Get the set of the tree of hyponyms under the synset"""
    collected = []
    self._recurse_all_hyponyms(synset, collected)
    return set(collected)
def synsets_words(self, synsets):
    """Get the list of head-word strings for the given synsets.

    (Docstring corrected: a list is returned, not a set, preserving order.)
    """
    return [self.synset_word(synset) for synset in synsets]
def synset_word(self, synset):
    """Return the lemma part of a synset name, e.g. 'dog.n.01' -> 'dog'."""
    full_name = synset.name()
    word, _, _ = full_name.partition('.')
    return word
# Compute the Word Mover’s Distance between two sentences.
def sentence_similarity(self, sentence_1, sentence_2, gensim_model):
    """Return the Word Mover's Distance between the two sentences under gensim_model."""
    return gensim_model.wmdistance(sentence_1, sentence_2)
# Script entry point: constructing DataPreprocessing presumably kicks off the
# preprocessing pipeline in its __init__ — TODO confirm (class __init__ not in view).
if __name__ == "__main__":
    DataPreprocessing()
| 54.717296
| 127
| 0.537629
| 19,122
| 174,001
| 4.462295
| 0.03007
| 0.029346
| 0.01547
| 0.013009
| 0.843615
| 0.805093
| 0.770134
| 0.741644
| 0.721487
| 0.706755
| 0
| 0.01478
| 0.379038
| 174,001
| 3,179
| 128
| 54.734508
| 0.774943
| 0.029488
| 0
| 0.704593
| 0
| 0.004871
| 0.066943
| 0.005134
| 0
| 0
| 0
| 0
| 0
| 1
| 0.020181
| false
| 0.000348
| 0.004871
| 0.001044
| 0.034795
| 0.09499
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c33e5d4b6cf2e53380b6ba79e9eec8883965da8c
| 123
|
py
|
Python
|
release/scripts/presets/fluid/oil.py
|
rbabari/blender
|
6daa85f14b2974abfc3d0f654c5547f487bb3b74
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | 365
|
2015-02-10T15:10:55.000Z
|
2022-03-03T15:50:51.000Z
|
release/scripts/presets/fluid/oil.py
|
rbabari/blender
|
6daa85f14b2974abfc3d0f654c5547f487bb3b74
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | 45
|
2015-01-09T15:34:20.000Z
|
2021-10-05T14:44:23.000Z
|
release/scripts/presets/fluid/oil.py
|
rbabari/blender
|
6daa85f14b2974abfc3d0f654c5547f487bb3b74
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | 172
|
2015-01-25T15:16:53.000Z
|
2022-01-31T08:25:36.000Z
|
import bpy
bpy.context.fluid.domain_settings.viscosity_base = 5.0
bpy.context.fluid.domain_settings.viscosity_exponent = 5
| 30.75
| 56
| 0.845528
| 19
| 123
| 5.263158
| 0.578947
| 0.2
| 0.3
| 0.42
| 0.76
| 0.76
| 0
| 0
| 0
| 0
| 0
| 0.026087
| 0.065041
| 123
| 3
| 57
| 41
| 0.843478
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
c3787cb80576337d976847ffb342a7916e9c9161
| 10,516
|
py
|
Python
|
tests/v2/test_0527-fix-unionarray-ufuncs-and-parameters-in-merging.py
|
douglasdavis/awkward-1.0
|
f00775803a5568efb0a8e2dae3b1a4f23228fa40
|
[
"BSD-3-Clause"
] | null | null | null |
tests/v2/test_0527-fix-unionarray-ufuncs-and-parameters-in-merging.py
|
douglasdavis/awkward-1.0
|
f00775803a5568efb0a8e2dae3b1a4f23228fa40
|
[
"BSD-3-Clause"
] | null | null | null |
tests/v2/test_0527-fix-unionarray-ufuncs-and-parameters-in-merging.py
|
douglasdavis/awkward-1.0
|
f00775803a5568efb0a8e2dae3b1a4f23228fa40
|
[
"BSD-3-Clause"
] | null | null | null |
# BSD 3-Clause License; see https://github.com/scikit-hep/awkward-1.0/blob/main/LICENSE
import pytest # noqa: F401
import numpy as np # noqa: F401
import awkward as ak # noqa: F401
# https://github.com/scikit-hep/awkward-1.0/issues/459#issuecomment-694941328
#
# So the rules would be,
# * if arrays have different `__array__` or `__record__` parameters, they are not equal;
# * if they otherwise have different parameters, the types can be equal, but merging
# (concatenation, option-simplify, or union-simplify) removes parameters other than
# `__array__` and `__record__`.
def test_0459_types():
plain_plain = ak._v2.highlevel.Array([0.0, 1.1, 2.2, 3.3, 4.4])
array_plain = ak._v2.operations.structure.with_parameter(
plain_plain, "__array__", "zoinks"
)
plain_isdoc = ak._v2.operations.structure.with_parameter(
plain_plain, "__doc__", "This is a zoink."
)
array_isdoc = ak._v2.operations.structure.with_parameter(
array_plain, "__doc__", "This is a zoink."
)
assert ak._v2.operations.describe.parameters(plain_plain) == {}
assert ak._v2.operations.describe.parameters(array_plain) == {"__array__": "zoinks"}
assert ak._v2.operations.describe.parameters(plain_isdoc) == {
"__doc__": "This is a zoink."
}
assert ak._v2.operations.describe.parameters(array_isdoc) == {
"__array__": "zoinks",
"__doc__": "This is a zoink.",
}
assert ak._v2.operations.describe.type(
plain_plain
) == ak._v2.operations.describe.type(plain_plain)
assert ak._v2.operations.describe.type(
array_plain
) == ak._v2.operations.describe.type(array_plain)
assert ak._v2.operations.describe.type(
plain_isdoc
) == ak._v2.operations.describe.type(plain_isdoc)
assert ak._v2.operations.describe.type(
array_isdoc
) == ak._v2.operations.describe.type(array_isdoc)
assert ak._v2.operations.describe.type(
plain_plain
) != ak._v2.operations.describe.type(array_plain)
assert ak._v2.operations.describe.type(
array_plain
) != ak._v2.operations.describe.type(plain_plain)
assert ak._v2.operations.describe.type(
plain_plain
) == ak._v2.operations.describe.type(plain_isdoc)
assert ak._v2.operations.describe.type(
plain_isdoc
) == ak._v2.operations.describe.type(plain_plain)
assert ak._v2.operations.describe.type(
array_plain
) == ak._v2.operations.describe.type(array_isdoc)
assert ak._v2.operations.describe.type(
array_isdoc
) == ak._v2.operations.describe.type(array_plain)
assert ak._v2.operations.describe.type(
plain_isdoc
) != ak._v2.operations.describe.type(array_isdoc)
assert ak._v2.operations.describe.type(
array_isdoc
) != ak._v2.operations.describe.type(plain_isdoc)
assert array_plain.layout.parameters == {"__array__": "zoinks"}
assert (
ak._v2.operations.structure.without_parameters(array_plain).layout.parameters
== {}
)
assert plain_isdoc.layout.parameters == {"__doc__": "This is a zoink."}
assert (
ak._v2.operations.structure.without_parameters(plain_isdoc).layout.parameters
== {}
)
assert array_isdoc.layout.parameters == {
"__array__": "zoinks",
"__doc__": "This is a zoink.",
}
assert (
ak._v2.operations.structure.without_parameters(array_isdoc).layout.parameters
== {}
)
def test_0459():
plain_plain = ak._v2.highlevel.Array([0.0, 1.1, 2.2, 3.3, 4.4])
array_plain = ak._v2.operations.structure.with_parameter(
plain_plain, "__array__", "zoinks"
)
plain_isdoc = ak._v2.operations.structure.with_parameter(
plain_plain, "__doc__", "This is a zoink."
)
array_isdoc = ak._v2.operations.structure.with_parameter(
array_plain, "__doc__", "This is a zoink."
)
assert ak._v2.operations.describe.parameters(plain_plain) == {}
assert ak._v2.operations.describe.parameters(array_plain) == {"__array__": "zoinks"}
assert ak._v2.operations.describe.parameters(plain_isdoc) == {
"__doc__": "This is a zoink."
}
assert ak._v2.operations.describe.parameters(array_isdoc) == {
"__array__": "zoinks",
"__doc__": "This is a zoink.",
}
assert (
ak._v2.operations.describe.parameters(
ak._v2.operations.structure.concatenate([plain_plain, plain_plain])
)
== {}
)
assert ak._v2.operations.describe.parameters(
ak._v2.operations.structure.concatenate([array_plain, array_plain])
) == {"__array__": "zoinks"}
assert ak._v2.operations.describe.parameters(
ak._v2.operations.structure.concatenate([plain_isdoc, plain_isdoc])
) == {"__doc__": "This is a zoink."}
assert ak._v2.operations.describe.parameters(
ak._v2.operations.structure.concatenate([array_isdoc, array_isdoc])
) == {
"__array__": "zoinks",
"__doc__": "This is a zoink.",
}
assert isinstance(
ak._v2.operations.structure.concatenate([plain_plain, plain_plain]).layout,
ak._v2.contents.NumpyArray,
)
assert isinstance(
ak._v2.operations.structure.concatenate([array_plain, array_plain]).layout,
ak._v2.contents.NumpyArray,
)
assert isinstance(
ak._v2.operations.structure.concatenate([plain_isdoc, plain_isdoc]).layout,
ak._v2.contents.NumpyArray,
)
assert isinstance(
ak._v2.operations.structure.concatenate([array_isdoc, array_isdoc]).layout,
ak._v2.contents.NumpyArray,
)
assert (
ak._v2.operations.describe.parameters(
ak._v2.operations.structure.concatenate([plain_plain, array_plain])
)
== {}
)
assert (
ak._v2.operations.describe.parameters(
ak._v2.operations.structure.concatenate([plain_isdoc, array_isdoc])
)
== {}
)
assert (
ak._v2.operations.describe.parameters(
ak._v2.operations.structure.concatenate([array_plain, plain_plain])
)
== {}
)
assert (
ak._v2.operations.describe.parameters(
ak._v2.operations.structure.concatenate([array_isdoc, plain_isdoc])
)
== {}
)
assert isinstance(
ak._v2.operations.structure.concatenate([plain_plain, array_plain]).layout,
ak._v2.contents.UnionArray,
)
assert isinstance(
ak._v2.operations.structure.concatenate([plain_isdoc, array_isdoc]).layout,
ak._v2.contents.UnionArray,
)
assert isinstance(
ak._v2.operations.structure.concatenate([array_plain, plain_plain]).layout,
ak._v2.contents.UnionArray,
)
assert isinstance(
ak._v2.operations.structure.concatenate([array_isdoc, plain_isdoc]).layout,
ak._v2.contents.UnionArray,
)
assert (
ak._v2.operations.describe.parameters(
ak._v2.operations.structure.concatenate([plain_plain, plain_isdoc])
)
== {}
)
assert ak._v2.operations.describe.parameters(
ak._v2.operations.structure.concatenate([array_plain, array_isdoc])
) == {"__array__": "zoinks"}
assert (
ak._v2.operations.describe.parameters(
ak._v2.operations.structure.concatenate([plain_isdoc, plain_plain])
)
== {}
)
assert ak._v2.operations.describe.parameters(
ak._v2.operations.structure.concatenate([array_isdoc, array_plain])
) == {"__array__": "zoinks"}
assert isinstance(
ak._v2.operations.structure.concatenate([plain_plain, plain_isdoc]).layout,
ak._v2.contents.NumpyArray,
)
assert isinstance(
ak._v2.operations.structure.concatenate([array_plain, array_isdoc]).layout,
ak._v2.contents.NumpyArray,
)
assert isinstance(
ak._v2.operations.structure.concatenate([plain_isdoc, plain_plain]).layout,
ak._v2.contents.NumpyArray,
)
assert isinstance(
ak._v2.operations.structure.concatenate([array_isdoc, array_plain]).layout,
ak._v2.contents.NumpyArray,
)
def test_0522():
content1 = ak._v2.highlevel.Array([0.0, 1.1, 2.2, 3.3, 4.4]).layout
content2 = ak._v2.highlevel.Array([0, 100, 200, 300, 400]).layout
tags = ak._v2.index.Index8(np.array([0, 0, 0, 1, 1, 0, 0, 1, 1, 1], np.int8))
index = ak._v2.index.Index64(np.array([0, 1, 2, 0, 1, 3, 4, 2, 3, 4], np.int64))
unionarray = ak._v2.highlevel.Array(
ak._v2.contents.UnionArray(tags, index, [content1, content2])
)
assert unionarray.tolist() == [0.0, 1.1, 2.2, 0, 100, 3.3, 4.4, 200, 300, 400]
assert (unionarray + 10).tolist() == [
10.0,
11.1,
12.2,
10,
110,
13.3,
14.4,
210,
310,
410,
]
assert (10 + unionarray).tolist() == [
10.0,
11.1,
12.2,
10,
110,
13.3,
14.4,
210,
310,
410,
]
assert (unionarray + range(0, 100, 10)).tolist() == [
0.0,
11.1,
22.2,
30,
140,
53.3,
64.4,
270,
380,
490,
]
assert (range(0, 100, 10) + unionarray).tolist() == [
0.0,
11.1,
22.2,
30,
140,
53.3,
64.4,
270,
380,
490,
]
assert (unionarray + np.arange(0, 100, 10)).tolist() == [
0.0,
11.1,
22.2,
30,
140,
53.3,
64.4,
270,
380,
490,
]
assert (np.arange(0, 100, 10) + unionarray).tolist() == [
0.0,
11.1,
22.2,
30,
140,
53.3,
64.4,
270,
380,
490,
]
assert (unionarray + ak._v2.highlevel.Array(np.arange(0, 100, 10))).tolist() == [
0.0,
11.1,
22.2,
30,
140,
53.3,
64.4,
270,
380,
490,
]
assert (ak._v2.highlevel.Array(np.arange(0, 100, 10)) + unionarray).tolist() == [
0.0,
11.1,
22.2,
30,
140,
53.3,
64.4,
270,
380,
490,
]
assert (unionarray + unionarray).tolist() == [
0.0,
2.2,
4.4,
0,
200,
6.6,
8.8,
400,
600,
800,
]
| 29.292479
| 91
| 0.597946
| 1,234
| 10,516
| 4.833063
| 0.099676
| 0.066398
| 0.180751
| 0.162307
| 0.85664
| 0.846915
| 0.844064
| 0.834675
| 0.809859
| 0.775989
| 0
| 0.067696
| 0.266736
| 10,516
| 358
| 92
| 29.374302
| 0.705745
| 0.048878
| 0
| 0.63354
| 0
| 0
| 0.04565
| 0
| 0
| 0
| 0
| 0
| 0.186335
| 1
| 0.009317
| false
| 0
| 0.009317
| 0
| 0.018634
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6f3c05752f5067dc4497a1ea8cd4172240294c1f
| 29,877
|
py
|
Python
|
test/conftest.py
|
bibsian/lter
|
5e5ac08b62e031ddabad490cc59db4a945ea20f6
|
[
"MIT"
] | 5
|
2016-05-02T22:56:01.000Z
|
2016-08-18T23:03:05.000Z
|
test/conftest.py
|
bibsian/lter
|
5e5ac08b62e031ddabad490cc59db4a945ea20f6
|
[
"MIT"
] | 11
|
2016-08-03T20:39:42.000Z
|
2016-10-18T22:03:38.000Z
|
test/conftest.py
|
bibsian/database-development
|
5e5ac08b62e031ddabad490cc59db4a945ea20f6
|
[
"MIT"
] | null | null | null |
import pytest
from pandas import merge, concat, DataFrame, read_sql
from sqlalchemy import select, update, column
from collections import OrderedDict, namedtuple
import os, sys
from poplerGUI.logiclayer.datalayer import config as orm
from poplerGUI import class_inputhandler as ini
from poplerGUI.logiclayer.class_helpers import (
string_to_list, extract
)
rootpath = os.path.dirname(os.path.dirname( __file__ ))
end = os.path.sep
os.chdir(rootpath)
# --- Fixtures to use across all test in this folder --- #
# ------------------------------------------------------ #
# ---------------- meta data handle --------------- #
# ------------------------------------------------------ #
@pytest.fixture
def meta_handle_1_count():
lentry = {
'globalid': 1,
'metaurl': ('http://sbc.lternet.edu/cgi-bin/showDataset.cgi?docid=knb-lter-sbc.18'),
'lter': 'SBC'}
ckentry = {}
metainput = ini.InputHandler(
name='metacheck', tablename=None, lnedentry=lentry,
checks=ckentry)
return metainput
@pytest.fixture
def meta_handle_2_density():
lentry = {
'globalid': 2,
'metaurl': ('http://sbc.lternet.edu/cgi-bin/showDataset.cgi?docid=knb-lter-sbc.17'),
'lter': 'SBC'}
ckentry = {}
metainput = ini.InputHandler(
name='metacheck', tablename=None, lnedentry=lentry,
checks=ckentry)
return metainput
@pytest.fixture
def meta_handle_3_biomass():
lentry = {
'globalid': 3,
'metaurl': ('http://sbc.lternet.edu/cgi-bin/showDataset.cgi?docid=knb-lter-sbc.19'),
'lter': 'SBC'}
ckentry = {}
metainput = ini.InputHandler(
name='metacheck', tablename=None, lnedentry=lentry,
checks=ckentry)
return metainput
@pytest.fixture
def meta_handle_4_percent_cover():
lentry = {
'globalid': 4,
'metaurl': ('http://sbc.lternet.edu/cgi-bin/showDataset.cgi?docid=knb-lter-sbc.15'),
'lter': 'SBC'}
ckentry = {}
metainput = ini.InputHandler(
name='metacheck', tablename=None, lnedentry=lentry,
checks=ckentry)
return metainput
@pytest.fixture
def meta_handle5():
lentry = {
'globalid': 6,
'metaurl': ('http://sbc.lternet.edu/cgi-bin/showDataset.cgi?docid=knb-lter-sbc.29'),
'lter': 'SBC'}
ckentry = {}
metainput = ini.InputHandler(
name='metacheck', tablename=None, lnedentry=lentry,
checks=ckentry)
return metainput
@pytest.fixture
def meta_handle7():
lentry = {
'globalid': 7,
'metaurl': ('http://sbc.lternet.edu/cgi-bin/showDataset.cgi?docid=knb-lter-sbc.30'),
'lter': 'SBC'}
ckentry = {}
metainput = ini.InputHandler(
name='metacheck', tablename=None, lnedentry=lentry,
checks=ckentry)
return metainput
@pytest.fixture
def meta_handle_free():
lentry = {
'globalid': 312,
'metaurl': ('http://gce-lter.marsci.uga.edu/public/app/dataset_details.asp?accession=PLT-GCES-0706'),
'lter': 'GCE'}
ckentry = {}
metainput = ini.InputHandler(
name='metacheck', tablename=None, lnedentry=lentry,
checks=ckentry)
return metainput
# ------------------------------------------------------ #
# ---------------- File loader handle --------------- #
# ------------------------------------------------------ #
@pytest.fixture
def file_handle_split_columns():
ckentry = {}
rbtn = {'.csv': True, '.txt': False,
'.xlsx': False}
lned = {
'sheet': '', 'delim': '', 'tskip': '', 'bskip': '',
'header': ''}
fileinput = ini.InputHandler(
name='fileoptions',tablename=None, lnedentry=lned,
rbtns=rbtn, checks=ckentry, session=True,
filename=(
rootpath + end + 'test' + end + 'Datasets_manual_test' + end +
'splitcolumn_data_test.csv'))
return fileinput
@pytest.fixture
def file_handle_wide_to_long():
ckentry = {}
rbtn = {'.csv': True, '.txt': False,
'.xlsx': False}
lned = {
'sheet': '', 'delim': '', 'tskip': '', 'bskip': '',
'header': ''}
fileinput = ini.InputHandler(
name='fileoptions',tablename=None, lnedentry=lned,
rbtns=rbtn, checks=ckentry, session=True,
filename=(
rootpath + end + 'test' + end + 'Datasets_manual_test' + end +
'raw_data_test_6_wide_to_long.csv'))
return fileinput
@pytest.fixture
def file_handle_1_count():
ckentry = {}
rbtn = {'.csv': True, '.txt': False,
'.xlsx': False}
lned = {
'sheet': '', 'delim': '', 'tskip': '', 'bskip': '',
'header': ''}
fileinput = ini.InputHandler(
name='fileoptions',tablename=None, lnedentry=lned,
rbtns=rbtn, checks=ckentry, session=True,
filename=(
rootpath + end + 'test' + end + 'Datasets_manual_test' + end +
'raw_data_test_1.csv'))
return fileinput
@pytest.fixture
def file_handle_2_density():
ckentry = {}
rbtn = {'.csv': True, '.txt': False,
'.xlsx': False}
lned = {
'sheet': '', 'delim': '', 'tskip': '', 'bskip': '',
'header': ''}
fileinput = ini.InputHandler(
name='fileoptions',tablename=None, lnedentry=lned,
rbtns=rbtn, checks=ckentry, session=True,
filename=(
rootpath + end + 'test' + end + 'Datasets_manual_test' + end +
'raw_data_test_2.csv'))
return fileinput
@pytest.fixture
def file_handle_3_biomass():
ckentry = {}
rbtn = {'.csv': True, '.txt': False,
'.xlsx': False}
lned = {
'sheet': '', 'delim': '', 'tskip': '', 'bskip': '',
'header': ''}
fileinput = ini.InputHandler(
name='fileoptions',tablename=None, lnedentry=lned,
rbtns=rbtn, checks=ckentry, session=True,
filename=(
rootpath + end + 'test' + end + 'Datasets_manual_test' + end +
'raw_data_test_3.csv'))
return fileinput
@pytest.fixture
def file_handle_4_percent_cover():
ckentry = {}
rbtn = {'.csv': True, '.txt': False,
'.xlsx': False}
lned = {
'sheet': '', 'delim': '', 'tskip': '', 'bskip': '',
'header': ''}
fileinput = ini.InputHandler(
name='fileoptions',tablename=None, lnedentry=lned,
rbtns=rbtn, checks=ckentry, session=True,
filename=(
rootpath + end + 'test' + end + 'Datasets_manual_test' + end +
'raw_data_test_4.csv'))
return fileinput
@pytest.fixture
def file_handle5():
ckentry = {}
rbtn = {'.csv': True, '.txt': False,
'.xlsx': False}
lned = {
'sheet': '', 'delim': '', 'tskip': '', 'bskip': '',
'header': ''}
fileinput = ini.InputHandler(
name='fileoptions',tablename=None, lnedentry=lned,
rbtns=rbtn, checks=ckentry, session=True,
filename=(
rootpath + end + 'test' + end + 'Datasets_manual_test' + end +
'raw_data_test_5.csv'))
return fileinput
@pytest.fixture
def file_handle_free():
ckentry = {}
rbtn = {'.csv': True, '.txt': False,
'.xlsx': False}
lned = {
'sheet': '', 'delim': '', 'tskip': '', 'bskip': '',
'header': ''}
fileinput = ini.InputHandler(
name='fileoptions',tablename=None, lnedentry=lned,
rbtns=rbtn, checks=ckentry, session=True,
filename=(
rootpath + end + 'data'+ end +
'PLT-OTH-1509-Garden_1_0.csv'))
return fileinput
# ------------------------------------------------------ #
# ---------------- Study site handle --------------- #
# ----------------------------------------------------- #
@pytest.fixture
def site_handle_wide_to_long():
lned = {'study_site_key': 'SITE'}
sitehandle = ini.InputHandler(
name='siteinfo', lnedentry=lned, tablename='study_site_table')
return sitehandle
@pytest.fixture
def site_handle_1_count():
lned = {'study_site_key': 'site'}
sitehandle = ini.InputHandler(
name='siteinfo', lnedentry=lned, tablename='study_site_table')
return sitehandle
@pytest.fixture
def site_handle_2_density():
    """Study-site handler for dataset 2: 'SITE' column is the site key."""
    return ini.InputHandler(
        name='siteinfo',
        lnedentry={'study_site_key': 'SITE'},
        tablename='study_site_table')
@pytest.fixture
def site_handle_3_biomass():
    """Study-site handler for dataset 3: 'site' column is the site key."""
    return ini.InputHandler(
        name='siteinfo',
        lnedentry={'study_site_key': 'site'},
        tablename='study_site_table')
@pytest.fixture
def site_handle_4_percent_cover():
    """Study-site handler for dataset 4: 'site' column is the site key."""
    return ini.InputHandler(
        name='siteinfo',
        lnedentry={'study_site_key': 'site'},
        tablename='study_site_table')
@pytest.fixture
def site_handle5():
    """Study-site handler for dataset 5: 'SITE' column is the site key."""
    return ini.InputHandler(
        name='siteinfo',
        lnedentry={'study_site_key': 'SITE'},
        tablename='study_site_table')
@pytest.fixture
def site_handle_free():
    """Study-site handler for the free dataset: 'Location' is the site key."""
    return ini.InputHandler(
        name='siteinfo',
        lnedentry={'study_site_key': 'Location'},
        tablename='study_site_table')
# ------------------------------------------------------ #
# ---------------- Project table handle --------------- #
# ------------------------------------------------------ #
@pytest.fixture
def project_handle_1_count():
    """Project-table handler for dataset 1 with every form section populated."""
    # Every form section shares the same (checked, entry, unit) record layout;
    # each section keeps its own namedtuple typename to match the original form.
    fields = 'checked entry unit'
    studytype = namedtuple('studytype', fields)
    derived = namedtuple('derived', fields)
    treatments = namedtuple('treatments', fields)
    contacts = namedtuple('contacts', fields)     # authors / contact emails
    community = namedtuple('community', fields)
    sampfreq = namedtuple('sampfreq', fields)     # sampling frequency
    dtype = namedtuple('dtype', fields)           # datatype / units
    structure = namedtuple('structure', fields)   # organism structure
    ext = namedtuple('spatial_ext', fields)       # spatial extent
    form_dict = OrderedDict((
        ('samplingunits', dtype(False, '', None)),
        ('datatype', dtype(True, 'count', None)),
        ('structured_type_1', structure(True, 'size', 'cm')),
        ('structured_type_2', structure(False, '', '')),
        ('structured_type_3', structure(False, '', '')),
        ('samplefreq', sampfreq(True, 'month:yr', None)),
        ('studytype', studytype(True, 'obs', None)),
        ('community', community(True, True, None)),
        ('spatial_replication_level_1_extent', ext(True, '100', 'm2')),
        ('spatial_replication_level_2_extent', ext(True, '10', 'm2')),
        ('spatial_replication_level_3_extent', ext(False, '', '')),
        ('spatial_replication_level_4_extent', ext(False, '', '')),
        ('spatial_replication_level_5_extent', ext(False, '', '')),
        ('treatment_type_1', treatments(False, 'NULL', None)),
        ('treatment_type_2', treatments(False, 'NULL', None)),
        ('treatment_type_3', treatments(False, 'NULL', None)),
        ('derived', derived(True, 'no', None)),
        ('authors', contacts(True, 'AJ Bibian, TEX Miller', None)),
        ('authors_contact', contacts(
            True, 'aj@hotmail.com, tex@hotmail.com', None))
    ))
    return ini.InputHandler(
        name='maininfo', tablename='project_table', lnedentry=form_dict)
@pytest.fixture
def project_handle_2_density():
    """Bare project-table handler (no pre-filled form) for dataset 2."""
    return ini.InputHandler(name='maininfo', tablename='project_table')
@pytest.fixture
def project_handle_3_biomass():
    """Bare project-table handler (no pre-filled form) for dataset 3."""
    return ini.InputHandler(name='maininfo', tablename='project_table')
@pytest.fixture
def project_handle_4_percent_cover():
    """Bare project-table handler (no pre-filled form) for dataset 4."""
    return ini.InputHandler(name='maininfo', tablename='project_table')
@pytest.fixture
def project_handle5():
    """Bare project-table handler (no pre-filled form) for dataset 5."""
    return ini.InputHandler(name='maininfo', tablename='project_table')
# ------------------------------------------------------ #
# ---------------- taxa table handle --------------- #
# ------------------------------------------------------ #
@pytest.fixture
def taxa_handle_1_count():
    """Taxa-table handler for dataset 1: only genus and species are mapped."""
    # Rank order mirrors the taxa_table schema (abbreviated spellings such as
    # 'divsion'/'clss'/'ordr' are the schema's own column names).
    ranks = [
        'sppcode', 'kingdom', 'subkingdom', 'infrakingdom', 'superdivision',
        'divsion', 'subdivision', 'superphylum', 'phylum', 'subphylum',
        'clss', 'subclass', 'ordr', 'family', 'genus', 'species']
    # Raw-file column name per rank ('' when the file lacks that rank)
    entries = {'genus': 'genus', 'species': 'species'}
    taxalned = OrderedDict((rank, entries.get(rank, '')) for rank in ranks)
    # Checkbox is ticked exactly for the mapped ranks
    taxackbox = OrderedDict((rank, rank in entries) for rank in ranks)
    taxacreate = {'taxacreate': False}
    available = [
        rank for rank, checked in taxackbox.items() if checked is True]
    return ini.InputHandler(
        name='taxainfo',
        tablename='taxa_table',
        lnedentry=extract(taxalned, available),
        checks=taxacreate)
@pytest.fixture
def taxa_handle_2_density():
    """Taxa-table handler for dataset 2: TAXON_* columns map most major ranks."""
    # Rank order mirrors the taxa_table schema (abbreviated spellings such as
    # 'divsion'/'clss'/'ordr' are the schema's own column names).
    ranks = [
        'sppcode', 'kingdom', 'subkingdom', 'infrakingdom', 'superdivision',
        'divsion', 'subdivision', 'superphylum', 'phylum', 'subphylum',
        'clss', 'subclass', 'ordr', 'family', 'genus', 'species']
    # Raw-file column name per rank ('' when the file lacks that rank)
    entries = {
        'kingdom': 'TAXON_KINGDOM', 'phylum': 'TAXON_PHYLUM',
        'clss': 'TAXON_CLASS', 'ordr': 'TAXON_ORDER',
        'family': 'TAXON_FAMILY', 'genus': 'TAXON_GENUS',
        'species': 'TAXON_SPECIES'}
    taxalned = OrderedDict((rank, entries.get(rank, '')) for rank in ranks)
    # Checkbox is ticked exactly for the mapped ranks
    taxackbox = OrderedDict((rank, rank in entries) for rank in ranks)
    taxacreate = {'taxacreate': False}
    available = [
        rank for rank, checked in taxackbox.items() if checked is True]
    return ini.InputHandler(
        name='taxainfo',
        tablename='taxa_table',
        lnedentry=extract(taxalned, available),
        checks=taxacreate)
@pytest.fixture
def taxa_handle_3_biomass():
    """Taxa-table handler for dataset 3: phylum through species are mapped."""
    # Rank order mirrors the taxa_table schema (abbreviated spellings such as
    # 'divsion'/'clss'/'ordr' are the schema's own column names).
    ranks = [
        'sppcode', 'kingdom', 'subkingdom', 'infrakingdom', 'superdivision',
        'divsion', 'subdivision', 'superphylum', 'phylum', 'subphylum',
        'clss', 'subclass', 'ordr', 'family', 'genus', 'species']
    # Raw-file column names match the rank names for this dataset
    entries = {
        'phylum': 'phylum', 'clss': 'clss', 'ordr': 'ordr',
        'family': 'family', 'genus': 'genus', 'species': 'species'}
    taxalned = OrderedDict((rank, entries.get(rank, '')) for rank in ranks)
    # Checkbox is ticked exactly for the mapped ranks
    taxackbox = OrderedDict((rank, rank in entries) for rank in ranks)
    taxacreate = {'taxacreate': False}
    available = [
        rank for rank, checked in taxackbox.items() if checked is True]
    return ini.InputHandler(
        name='taxainfo',
        tablename='taxa_table',
        lnedentry=extract(taxalned, available),
        checks=taxacreate)
@pytest.fixture
def taxa_handle_4_percent_cover():
    """Taxa-table handler for dataset 4: only a species code column is mapped."""
    # Rank order mirrors the taxa_table schema (abbreviated spellings such as
    # 'divsion'/'clss'/'ordr' are the schema's own column names).
    ranks = [
        'sppcode', 'kingdom', 'subkingdom', 'infrakingdom', 'superdivision',
        'divsion', 'subdivision', 'superphylum', 'phylum', 'subphylum',
        'clss', 'subclass', 'ordr', 'family', 'genus', 'species']
    # The raw file identifies taxa by a 'code' column only
    entries = {'sppcode': 'code'}
    taxalned = OrderedDict((rank, entries.get(rank, '')) for rank in ranks)
    # Checkbox is ticked exactly for the mapped ranks
    taxackbox = OrderedDict((rank, rank in entries) for rank in ranks)
    taxacreate = {'taxacreate': False}
    available = [
        rank for rank, checked in taxackbox.items() if checked is True]
    return ini.InputHandler(
        name='taxainfo',
        tablename='taxa_table',
        lnedentry=extract(taxalned, available),
        checks=taxacreate)
@pytest.fixture
def taxa_handle5():
    """Taxa-table handler for dataset 5: genus, species, and common name."""
    # Rank order mirrors the taxa_table schema; this dataset additionally
    # carries a 'common_name' column after the standard ranks.
    ranks = [
        'sppcode', 'kingdom', 'subkingdom', 'infrakingdom', 'superdivision',
        'divsion', 'subdivision', 'superphylum', 'phylum', 'subphylum',
        'clss', 'subclass', 'ordr', 'family', 'genus', 'species',
        'common_name']
    # Raw-file column name per rank ('' when the file lacks that rank)
    entries = {
        'genus': 'TAXON_GENUS', 'species': 'TAXON_SPECIES',
        'common_name': 'Common_Name'}
    taxalned = OrderedDict((rank, entries.get(rank, '')) for rank in ranks)
    # Checkbox is ticked exactly for the mapped ranks
    taxackbox = OrderedDict((rank, rank in entries) for rank in ranks)
    taxacreate = {'taxacreate': False}
    available = [
        rank for rank, checked in taxackbox.items() if checked is True]
    return ini.InputHandler(
        name='taxainfo',
        tablename='taxa_table',
        lnedentry=extract(taxalned, available),
        checks=taxacreate)
# ------------------------------------------------------ #
# ---------------- time handle --------------- #
# ------------------------------------------------------ #
@pytest.fixture
def time_handle_1_count():
    """Time-table handler for dataset 1: one 'date' column holds day/month/year."""
    date_config = dict(
        dayname='date', dayform='dd-mm-YYYY (Any Order)',
        monthname='date', monthform='dd-mm-YYYY (Any Order)',
        yearname='date', yearform='dd-mm-YYYY (Any Order)',
        jd=False, hms=False)
    return ini.InputHandler(
        name='timeinfo', tablename='timetable', lnedentry=date_config)
@pytest.fixture
def time_handle_2_density():
    """Time-table handler for dataset 2: MONTH/YEAR columns, no day column."""
    date_config = dict(
        dayname='', dayform='NULL',
        monthname='MONTH', monthform='mm',
        yearname='YEAR', yearform='YYYY',
        jd=False, hms=False)
    return ini.InputHandler(
        name='timeinfo', tablename='timetable', lnedentry=date_config)
@pytest.fixture
def time_handle_3_biomass():
    """Time-table handler for dataset 3: month/year columns, no day column."""
    date_config = dict(
        dayname='', dayform='NULL',
        monthname='month', monthform='mm',
        yearname='year', yearform='YYYY',
        jd=False, hms=False)
    return ini.InputHandler(
        name='timeinfo', tablename='timetable', lnedentry=date_config)
@pytest.fixture
def time_handle_4_percent_cover():
    """Time-table handler for dataset 4: month/year columns, no day column."""
    date_config = dict(
        dayname='', dayform='NULL',
        monthname='month', monthform='mm',
        yearname='year', yearform='YYYY',
        jd=False, hms=False)
    return ini.InputHandler(
        name='timeinfo', tablename='timetable', lnedentry=date_config)
@pytest.fixture
def time_handle5():
    """Time-table handler for dataset 5: only a YEAR column is present."""
    date_config = dict(
        dayname='', dayform='NULL',
        monthname='', monthform='NULL',
        yearname='YEAR', yearform='YYYY',
        jd=False, hms=False)
    return ini.InputHandler(
        name='timeinfo', tablename='timetable', lnedentry=date_config)
# ------------------------------------------------------ #
# ---------------- covar handle --------------- #
# ------------------------------------------------------ #
@pytest.fixture
def covar_handle_1_count():
    """Covariate handler exposing the 'temp' column."""
    covar_columns = {'columns': string_to_list('temp')}
    return ini.InputHandler(
        name='covarinfo', tablename='covartable', lnedentry=covar_columns)
@pytest.fixture
def covar_handle_2_density():
    """Covariate handler exposing the AREA, VIS, and OBS_CODE columns."""
    covar_columns = {'columns': string_to_list('AREA, VIS, OBS_CODE')}
    return ini.InputHandler(
        name='covarinfo', tablename='covartable', lnedentry=covar_columns)
@pytest.fixture
def covar_handle_3_biomass():
    """Covariate handler exposing the 'temp' column."""
    covar_columns = {'columns': string_to_list('temp')}
    return ini.InputHandler(
        name='covarinfo', tablename='covartable', lnedentry=covar_columns)
@pytest.fixture
def covar_handle_4_percent_cover():
    """Covariate handler exposing the 'Precip' column."""
    covar_columns = {'columns': string_to_list('Precip')}
    return ini.InputHandler(
        name='covarinfo', tablename='covartable', lnedentry=covar_columns)
@pytest.fixture
def covar_handle5():
    """Covariate handler exposing the TEMP and TAG columns."""
    covar_columns = {'columns': string_to_list('TEMP, TAG')}
    return ini.InputHandler(
        name='covarinfo', tablename='covartable', lnedentry=covar_columns)
# ------------------------------------------------------ #
# ---------------- obs table handle --------------- #
# ------------------------------------------------------ #
@pytest.fixture
def count_handle_1_count():
    """Raw count-table handler: transect replication plus a 'count' column."""
    # Column slots offered by the raw-observation form, in form order
    columns = [
        'spatial_replication_level_2', 'spatial_replication_level_3',
        'spatial_replication_level_4', 'spatial_replication_level_5',
        'structured_type_1', 'structured_type_2', 'structured_type_3',
        'treatment_type_1', 'treatment_type_2', 'treatment_type_3',
        'unitobs']
    # Raw-file column per slot ('' when unused) and which boxes are ticked
    entries = {'spatial_replication_level_2': 'transect', 'unitobs': 'count'}
    checked = {'spatial_replication_level_2', 'unitobs'}
    obslned = OrderedDict((col, entries.get(col, '')) for col in columns)
    obsckbox = OrderedDict((col, col in checked) for col in columns)
    available = [col for col, used in obsckbox.items() if used is True]
    return ini.InputHandler(
        name='rawinfo',
        tablename='count_table',
        lnedentry=extract(obslned, available),
        checks=obsckbox)
@pytest.fixture
def count_handle_2_density():
    """Raw density-table handler: transect/quad/side replication, DENSITY units."""
    # Column slots offered by the raw-observation form, in form order
    columns = [
        'spatial_replication_level_2', 'spatial_replication_level_3',
        'spatial_replication_level_4', 'spatial_replication_level_5',
        'structured_type_1', 'structured_type_2', 'structured_type_3',
        'treatment_type_1', 'treatment_type_2', 'treatment_type_3',
        'unitobs']
    # Raw-file column per slot ('' when unused) and which boxes are ticked
    entries = {
        'spatial_replication_level_2': 'TRANSECT',
        'spatial_replication_level_3': 'QUAD',
        'spatial_replication_level_4': 'SIDE',
        'unitobs': 'DENSITY'}
    checked = set(entries)
    obslned = OrderedDict((col, entries.get(col, '')) for col in columns)
    obsckbox = OrderedDict((col, col in checked) for col in columns)
    available = [col for col, used in obsckbox.items() if used is True]
    return ini.InputHandler(
        name='rawinfo',
        tablename='density_table',
        lnedentry=extract(obslned, available),
        checks=obsckbox)
@pytest.fixture
def count_handle_3_biomass():
    """Raw biomass-table handler: plot/quadrat replication, 'biomass' units."""
    # Column slots offered by the raw-observation form, in form order
    columns = [
        'spatial_replication_level_2', 'spatial_replication_level_3',
        'spatial_replication_level_4', 'spatial_replication_level_5',
        'structured_type_1', 'structured_type_2', 'structured_type_3',
        'treatment_type_1', 'treatment_type_2', 'treatment_type_3',
        'unitobs']
    # Raw-file column per slot ('' when unused) and which boxes are ticked
    entries = {
        'spatial_replication_level_2': 'plot',
        'spatial_replication_level_3': 'quadrat',
        'unitobs': 'biomass'}
    checked = set(entries)
    obslned = OrderedDict((col, entries.get(col, '')) for col in columns)
    obsckbox = OrderedDict((col, col in checked) for col in columns)
    available = [col for col, used in obsckbox.items() if used is True]
    return ini.InputHandler(
        name='rawinfo',
        tablename='biomass_table',
        lnedentry=extract(obslned, available),
        checks=obsckbox)
@pytest.fixture
def biomass_handle_4_percent_cover():
    """Raw percent-cover handler: block/plot replication, 'cover' units."""
    # Column slots offered by the raw-observation form, in form order
    columns = [
        'spatial_replication_level_2', 'spatial_replication_level_3',
        'spatial_replication_level_4', 'spatial_replication_level_5',
        'structured_type_1', 'structured_type_2', 'structured_type_3',
        'treatment_type_1', 'treatment_type_2', 'treatment_type_3',
        'unitobs']
    # Raw-file column per slot ('' when unused) and which boxes are ticked
    entries = {
        'spatial_replication_level_2': 'block',
        'spatial_replication_level_3': 'plot',
        'unitobs': 'cover'}
    checked = set(entries)
    obslned = OrderedDict((col, entries.get(col, '')) for col in columns)
    obsckbox = OrderedDict((col, col in checked) for col in columns)
    available = [col for col, used in obsckbox.items() if used is True]
    return ini.InputHandler(
        name='rawinfo',
        tablename='percent_cover_table',
        lnedentry=extract(obslned, available),
        checks=obsckbox)
@pytest.fixture
def count_handle5():
    """Raw individual-table handler: transect replication, unitobs box ticked
    but left blank (individual records carry no unit column)."""
    # Column slots offered by the raw-observation form, in form order
    columns = [
        'spatial_replication_level_2', 'spatial_replication_level_3',
        'spatial_replication_level_4', 'spatial_replication_level_5',
        'structured_type_1', 'structured_type_2', 'structured_type_3',
        'treatment_type_1', 'treatment_type_2', 'treatment_type_3',
        'unitobs']
    # NOTE: 'unitobs' is checked although its entry stays '' — preserved as-is
    entries = {'spatial_replication_level_2': 'TRANSECT'}
    checked = {'spatial_replication_level_2', 'unitobs'}
    obslned = OrderedDict((col, entries.get(col, '')) for col in columns)
    obsckbox = OrderedDict((col, col in checked) for col in columns)
    available = [col for col, used in obsckbox.items() if used is True]
    return ini.InputHandler(
        name='rawinfo',
        tablename='individual_table',
        lnedentry=extract(obslned, available),
        checks=obsckbox)
| 30.117944
| 109
| 0.537203
| 2,722
| 29,877
| 5.700955
| 0.099927
| 0.039374
| 0.04846
| 0.017013
| 0.822529
| 0.80165
| 0.784315
| 0.77742
| 0.746424
| 0.735211
| 0
| 0.008961
| 0.267898
| 29,877
| 991
| 110
| 30.148335
| 0.700498
| 0.049503
| 0
| 0.791061
| 0
| 0.007821
| 0.253211
| 0.047078
| 0
| 0
| 0
| 0
| 0
| 1
| 0.052514
| false
| 0
| 0.008939
| 0
| 0.113966
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
48a374539424dd8b028aa6b6d394116cda70f9a8
| 317
|
py
|
Python
|
sysdfiles/device_file.py
|
ghuband/sysdfiles
|
eb13167b07b27351c3962a42b30dcd50ce551d71
|
[
"MIT"
] | null | null | null |
sysdfiles/device_file.py
|
ghuband/sysdfiles
|
eb13167b07b27351c3962a42b30dcd50ce551d71
|
[
"MIT"
] | 1
|
2020-06-05T20:58:05.000Z
|
2020-06-05T20:58:05.000Z
|
sysdfiles/device_file.py
|
ghuband/sysdfiles
|
eb13167b07b27351c3962a42b30dcd50ce551d71
|
[
"MIT"
] | 1
|
2020-06-09T02:35:35.000Z
|
2020-06-09T02:35:35.000Z
|
from .unit_file import UnitFile
# =============================================================================
# DeviceFile
# =============================================================================
class DeviceFile(UnitFile):
    """Unit file subclass for device units.

    Adds no behavior of its own; everything is inherited from UnitFile.
    """

    def __init__(self, file_name=''):
        """Initialize the device file, delegating to UnitFile.

        :param file_name: path of the unit file to load; '' for an empty file.
        """
        # Idiomatic cooperative call instead of UnitFile.__init__(self, ...)
        super().__init__(file_name)
| 28.818182
| 79
| 0.356467
| 19
| 317
| 5.368421
| 0.578947
| 0.156863
| 0.235294
| 0.313725
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.100946
| 317
| 10
| 80
| 31.7
| 0.357895
| 0.523659
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
48eb4cfe0ffd389d010b788c8957230261e1c499
| 18,877
|
py
|
Python
|
tests/command_builder/test_command_builder_basics.py
|
EddLabs/eddington-static
|
cdd1d9514c4eea1bd06c24894b3922e6cc3fb1f5
|
[
"Apache-2.0"
] | null | null | null |
tests/command_builder/test_command_builder_basics.py
|
EddLabs/eddington-static
|
cdd1d9514c4eea1bd06c24894b3922e6cc3fb1f5
|
[
"Apache-2.0"
] | null | null | null |
tests/command_builder/test_command_builder_basics.py
|
EddLabs/eddington-static
|
cdd1d9514c4eea1bd06c24894b3922e6cc3fb1f5
|
[
"Apache-2.0"
] | null | null | null |
import pytest
from statue.command_builder import CommandBuilder
from statue.context import Context
from statue.context_specification import ContextSpecification
from statue.exceptions import InconsistentConfiguration
from tests.constants import (
ARG1,
ARG2,
ARG3,
ARG4,
COMMAND1,
COMMAND_HELP_STRING1,
CONTEXT1,
CONTEXT2,
CONTEXT3,
CONTEXT4,
CONTEXT5,
CONTEXT6,
CONTEXT_HELP_STRING1,
CONTEXT_HELP_STRING2,
CONTEXT_HELP_STRING3,
CONTEXT_HELP_STRING4,
CONTEXT_HELP_STRING5,
CONTEXT_HELP_STRING6,
)
from tests.util import dummy_version
def test_command_builder_empty_constructor():
    """A builder made with only name/help has empty or None everything else."""
    command_builder = CommandBuilder(name=COMMAND1, help=COMMAND_HELP_STRING1)
    assert command_builder.name == COMMAND1
    assert command_builder.install_name == COMMAND1
    assert command_builder.help == COMMAND_HELP_STRING1
    assert not command_builder.default_args
    assert command_builder.version is None
    assert not command_builder.required_contexts
    assert not command_builder.allowed_contexts
    assert not command_builder.specified_contexts
    assert not command_builder.available_contexts
    assert command_builder.contexts_specifications == {}
    assert str(command_builder) == (
        "CommandBuilder("
        f"name={COMMAND1}, "
        f"help={COMMAND_HELP_STRING1}, "
        "default_args=[], "
        "version=None, "
        "required_contexts=[], "
        "allowed_contexts=[], "
        "contexts_specifications={}"
        ")"
    )
def test_command_builder_with_version():
    """A pinned version shows up in install_name ('name==version') and str()."""
    version = dummy_version()
    command_builder = CommandBuilder(
        name=COMMAND1, help=COMMAND_HELP_STRING1, version=version
    )
    assert command_builder.name == COMMAND1
    assert command_builder.install_name == f"{COMMAND1}=={version}"
    assert command_builder.help == COMMAND_HELP_STRING1
    assert not command_builder.default_args
    assert command_builder.version == version
    assert not command_builder.required_contexts
    assert not command_builder.allowed_contexts
    assert not command_builder.specified_contexts
    assert not command_builder.available_contexts
    assert command_builder.contexts_specifications == {}
    assert str(command_builder) == (
        "CommandBuilder("
        f"name={COMMAND1}, "
        f"help={COMMAND_HELP_STRING1}, "
        "default_args=[], "
        f"version={version}, "
        "required_contexts=[], "
        "allowed_contexts=[], "
        "contexts_specifications={}"
        ")"
    )
def test_command_builder_with_default_args():
    """default_args are kept verbatim and rendered (quoted) in str()."""
    command_builder = CommandBuilder(
        name=COMMAND1, help=COMMAND_HELP_STRING1, default_args=[ARG1, ARG2]
    )
    assert command_builder.name == COMMAND1
    assert command_builder.install_name == COMMAND1
    assert command_builder.help == COMMAND_HELP_STRING1
    assert command_builder.default_args == [ARG1, ARG2]
    assert command_builder.version is None
    assert not command_builder.required_contexts
    assert not command_builder.allowed_contexts
    assert not command_builder.specified_contexts
    assert not command_builder.available_contexts
    assert command_builder.contexts_specifications == {}
    assert str(command_builder) == (
        "CommandBuilder("
        f"name={COMMAND1}, "
        f"help={COMMAND_HELP_STRING1}, "
        f"default_args=['{ARG1}', '{ARG2}'], "
        "version=None, "
        "required_contexts=[], "
        "allowed_contexts=[], "
        "contexts_specifications={}"
        ")"
    )
def test_command_builder_with_required_contexts():
    """Required contexts are stored as a set and count as available contexts."""
    context1, context2 = (
        Context(name=CONTEXT1, help=CONTEXT_HELP_STRING1),
        Context(name=CONTEXT2, help=CONTEXT_HELP_STRING2),
    )
    command_builder = CommandBuilder(
        name=COMMAND1,
        help=COMMAND_HELP_STRING1,
        required_contexts=[context1, context2],
    )
    assert command_builder.name == COMMAND1
    assert command_builder.install_name == COMMAND1
    assert command_builder.help == COMMAND_HELP_STRING1
    assert not command_builder.default_args
    assert command_builder.version is None
    assert command_builder.required_contexts == {context1, context2}
    assert not command_builder.allowed_contexts
    assert not command_builder.specified_contexts
    assert command_builder.available_contexts == {context1, context2}
    assert command_builder.contexts_specifications == {}
    assert str(command_builder) == (
        "CommandBuilder("
        f"name={COMMAND1}, "
        f"help={COMMAND_HELP_STRING1}, "
        "default_args=[], "
        "version=None, "
        f"required_contexts=['{CONTEXT1}', '{CONTEXT2}'], "
        "allowed_contexts=[], "
        "contexts_specifications={}"
        ")"
    )
def test_command_builder_with_allowed_contexts():
    """Allowed contexts are stored as a set and count as available contexts."""
    context1, context2 = (
        Context(name=CONTEXT1, help=CONTEXT_HELP_STRING1),
        Context(name=CONTEXT2, help=CONTEXT_HELP_STRING2),
    )
    command_builder = CommandBuilder(
        name=COMMAND1,
        help=COMMAND_HELP_STRING1,
        allowed_contexts=[context1, context2],
    )
    assert command_builder.name == COMMAND1
    assert command_builder.install_name == COMMAND1
    assert command_builder.help == COMMAND_HELP_STRING1
    assert not command_builder.default_args
    assert command_builder.version is None
    assert not command_builder.required_contexts
    assert command_builder.allowed_contexts == {context1, context2}
    assert not command_builder.specified_contexts
    assert command_builder.available_contexts == {context1, context2}
    assert command_builder.contexts_specifications == {}
    assert str(command_builder) == (
        "CommandBuilder("
        f"name={COMMAND1}, "
        f"help={COMMAND_HELP_STRING1}, "
        "default_args=[], "
        "version=None, "
        "required_contexts=[], "
        f"allowed_contexts=['{CONTEXT1}', '{CONTEXT2}'], "
        "contexts_specifications={}"
        ")"
    )
def test_command_builder_with_specified_contexts():
    """Context specifications expose their keys as specified/available contexts."""
    context_specification1, context_specification2 = (
        ContextSpecification(args=[ARG1]),
        ContextSpecification(add_args=[ARG2]),
    )
    context1, context2 = (
        Context(name=CONTEXT1, help=CONTEXT_HELP_STRING1),
        Context(name=CONTEXT2, help=CONTEXT_HELP_STRING2),
    )
    command_builder = CommandBuilder(
        name=COMMAND1,
        help=COMMAND_HELP_STRING1,
        contexts_specifications={
            context1: context_specification1,
            context2: context_specification2,
        },
    )
    assert command_builder.name == COMMAND1
    assert command_builder.install_name == COMMAND1
    assert command_builder.help == COMMAND_HELP_STRING1
    assert not command_builder.default_args
    assert command_builder.version is None
    assert not command_builder.required_contexts
    assert not command_builder.allowed_contexts
    assert command_builder.specified_contexts == {context1, context2}
    assert command_builder.available_contexts == {context1, context2}
    assert command_builder.contexts_specifications == {
        context1: context_specification1,
        context2: context_specification2,
    }
    assert str(command_builder) == (
        "CommandBuilder("
        f"name={COMMAND1}, "
        f"help={COMMAND_HELP_STRING1}, "
        "default_args=[], "
        "version=None, "
        "required_contexts=[], "
        "allowed_contexts=[], "
        "contexts_specifications={"
        f"'{CONTEXT1}': {str(context_specification1)}, "
        f"'{CONTEXT2}': {str(context_specification2)}"
        "}"
        ")"
    )
def test_command_builder_with_all_fields():
    """All constructor fields together: available_contexts is the union of
    required, allowed, and specified contexts, and str() renders every field."""
    context1, context2, context3, context4, context5, context6 = (
        Context(name=CONTEXT1, help=CONTEXT_HELP_STRING1),
        Context(name=CONTEXT2, help=CONTEXT_HELP_STRING2),
        Context(name=CONTEXT3, help=CONTEXT_HELP_STRING3),
        Context(name=CONTEXT4, help=CONTEXT_HELP_STRING4),
        Context(name=CONTEXT5, help=CONTEXT_HELP_STRING5),
        Context(name=CONTEXT6, help=CONTEXT_HELP_STRING6),
    )
    context_specification1, context_specification2 = (
        ContextSpecification(args=[ARG3]),
        ContextSpecification(add_args=[ARG4]),
    )
    version = dummy_version()
    command_builder = CommandBuilder(
        name=COMMAND1,
        help=COMMAND_HELP_STRING1,
        default_args=[ARG1, ARG2],
        version=version,
        required_contexts=[context1, context2],
        allowed_contexts=[context3, context4],
        contexts_specifications={
            context5: context_specification1,
            context6: context_specification2,
        },
    )
    assert command_builder.name == COMMAND1
    assert command_builder.install_name == f"{COMMAND1}=={version}"
    assert command_builder.help == COMMAND_HELP_STRING1
    assert command_builder.default_args == [ARG1, ARG2]
    assert command_builder.version == version
    assert command_builder.required_contexts == {context1, context2}
    assert command_builder.allowed_contexts == {context3, context4}
    assert command_builder.specified_contexts == {context5, context6}
    assert command_builder.available_contexts == {
        context1,
        context2,
        context3,
        context4,
        context5,
        context6,
    }
    assert command_builder.contexts_specifications == {
        context5: context_specification1,
        context6: context_specification2,
    }
    assert str(command_builder) == (
        "CommandBuilder("
        f"name={COMMAND1}, "
        f"help={COMMAND_HELP_STRING1}, "
        f"default_args=['{ARG1}', '{ARG2}'], "
        f"version={version}, "
        f"required_contexts=['{CONTEXT1}', '{CONTEXT2}'], "
        f"allowed_contexts=['{CONTEXT3}', '{CONTEXT4}'], "
        "contexts_specifications={"
        f"'{CONTEXT5}': {str(context_specification1)}, "
        f"'{CONTEXT6}': {str(context_specification2)}"
        "}"
        ")"
    )
def test_command_builder_set_required_contexts():
    """required_contexts can be assigned after construction; stored as a set."""
    context1, context2 = (
        Context(name=CONTEXT1, help=CONTEXT_HELP_STRING1),
        Context(name=CONTEXT2, help=CONTEXT_HELP_STRING2),
    )
    command_builder = CommandBuilder(name=COMMAND1, help=COMMAND_HELP_STRING1)
    assert not command_builder.required_contexts
    command_builder.required_contexts = [context1, context2]
    assert command_builder.required_contexts == {context1, context2}
def test_command_builder_set_allowed_contexts():
    """allowed_contexts can be assigned after construction; stored as a set."""
    context1, context2 = (
        Context(name=CONTEXT1, help=CONTEXT_HELP_STRING1),
        Context(name=CONTEXT2, help=CONTEXT_HELP_STRING2),
    )
    command_builder = CommandBuilder(name=COMMAND1, help=COMMAND_HELP_STRING1)
    assert not command_builder.allowed_contexts
    command_builder.allowed_contexts = [context1, context2]
    assert command_builder.allowed_contexts == {context1, context2}
def test_command_builder_set_contexts_specification():
    """contexts_specifications can be assigned after construction."""
    context1, context2 = (
        Context(name=CONTEXT1, help=CONTEXT_HELP_STRING1),
        Context(name=CONTEXT2, help=CONTEXT_HELP_STRING2),
    )
    contexts_specifications = {
        context1: ContextSpecification(add_args=[ARG1]),
        context2: ContextSpecification(clear_args=True),
    }
    command_builder = CommandBuilder(name=COMMAND1, help=COMMAND_HELP_STRING1)
    assert not command_builder.contexts_specifications
    command_builder.contexts_specifications = contexts_specifications
    assert command_builder.contexts_specifications == contexts_specifications
def test_command_builder_reset_all_available_contexts():
    """reset_all_available_contexts() empties required, allowed and specified
    contexts, leaving available_contexts empty."""
    context1, context2, context3, context4, context5, context6 = (
        Context(name=CONTEXT1, help=CONTEXT_HELP_STRING1),
        Context(name=CONTEXT2, help=CONTEXT_HELP_STRING2),
        Context(name=CONTEXT3, help=CONTEXT_HELP_STRING3),
        Context(name=CONTEXT4, help=CONTEXT_HELP_STRING4),
        Context(name=CONTEXT5, help=CONTEXT_HELP_STRING5),
        Context(name=CONTEXT6, help=CONTEXT_HELP_STRING6),
    )
    context_specification1, context_specification2 = (
        ContextSpecification(args=[ARG3]),
        ContextSpecification(add_args=[ARG4]),
    )
    command_builder = CommandBuilder(
        name=COMMAND1,
        help=COMMAND_HELP_STRING1,
        required_contexts=[context1, context2],
        allowed_contexts=[context3, context4],
        contexts_specifications={
            context5: context_specification1,
            context6: context_specification2,
        },
    )
    assert command_builder.available_contexts == {
        context1,
        context2,
        context3,
        context4,
        context5,
        context6,
    }
    command_builder.reset_all_available_contexts()
    assert not command_builder.available_contexts
def test_command_builder_constructor_fail_on_one_context_both_allowed_and_required():
    """One context in both allowed and required lists raises
    InconsistentConfiguration naming the clashing context."""
    context1, context2 = (
        Context(name=CONTEXT1, help=CONTEXT_HELP_STRING1),
        Context(name=CONTEXT2, help=CONTEXT_HELP_STRING2),
    )
    with pytest.raises(
        InconsistentConfiguration,
        match=(
            "^allowed and required contexts clash "
            rf"\({COMMAND1} -> allowed/required -> {CONTEXT1}\)$"
        ),
    ):
        CommandBuilder(
            name=COMMAND1,
            help=COMMAND_HELP_STRING1,
            allowed_contexts=[context1, context2],
            required_contexts=[context1],
        )
def test_command_builder_constructor_fail_with_two_contexts_both_allowed_and_required():
    """Two clashing allowed/required contexts raise InconsistentConfiguration;
    the message names the clash kind but no individual context."""
    context1, context2, context3 = (
        Context(name=CONTEXT1, help=CONTEXT_HELP_STRING1),
        Context(name=CONTEXT2, help=CONTEXT_HELP_STRING2),
        Context(name=CONTEXT3, help=CONTEXT_HELP_STRING3),
    )
    with pytest.raises(
        InconsistentConfiguration,
        match=(
            "^allowed and required contexts clash "
            rf"\({COMMAND1} -> allowed/required\)$"
        ),
    ):
        CommandBuilder(
            name=COMMAND1,
            help=COMMAND_HELP_STRING1,
            allowed_contexts=[context1, context2, context3],
            required_contexts=[context1, context2],
        )
def test_command_builder_constructor_fail_on_one_context_both_allowed_and_specified():
    """One context in both allowed list and specifications raises
    InconsistentConfiguration naming the clashing context."""
    context1, context2 = (
        Context(name=CONTEXT1, help=CONTEXT_HELP_STRING1),
        Context(name=CONTEXT2, help=CONTEXT_HELP_STRING2),
    )
    with pytest.raises(
        InconsistentConfiguration,
        match=(
            "^allowed and specified contexts clash "
            rf"\({COMMAND1} -> allowed/specified -> {CONTEXT1}\)$"
        ),
    ):
        CommandBuilder(
            name=COMMAND1,
            help=COMMAND_HELP_STRING1,
            allowed_contexts=[context1, context2],
            contexts_specifications={context1: ContextSpecification(args=[ARG1])},
        )
def test_command_builder_constructor_fail_on_two_contexts_both_allowed_and_specified():
    """Constructor rejects two contexts that are both allowed and specified."""
    first = Context(name=CONTEXT1, help=CONTEXT_HELP_STRING1)
    second = Context(name=CONTEXT2, help=CONTEXT_HELP_STRING2)
    third = Context(name=CONTEXT3, help=CONTEXT_HELP_STRING3)
    expected_message = (
        "^allowed and specified contexts clash "
        rf"\({COMMAND1} -> allowed/specified\)$"
    )
    with pytest.raises(InconsistentConfiguration, match=expected_message):
        CommandBuilder(
            name=COMMAND1,
            help=COMMAND_HELP_STRING1,
            allowed_contexts=[first, second, third],
            contexts_specifications={
                first: ContextSpecification(args=[ARG1]),
                second: ContextSpecification(args=[ARG2]),
            },
        )
def test_command_builder_constructor_fail_on_one_context_both_required_and_specified():
    """Constructor rejects a context that is both required and specified."""
    first = Context(name=CONTEXT1, help=CONTEXT_HELP_STRING1)
    second = Context(name=CONTEXT2, help=CONTEXT_HELP_STRING2)
    expected_message = (
        "^required and specified contexts clash "
        rf"\({COMMAND1} -> required/specified -> {CONTEXT1}\)$"
    )
    with pytest.raises(InconsistentConfiguration, match=expected_message):
        CommandBuilder(
            name=COMMAND1,
            help=COMMAND_HELP_STRING1,
            required_contexts=[first, second],
            contexts_specifications={first: ContextSpecification(args=[ARG1])},
        )
def test_command_builder_constructor_fail_on_two_contexts_both_required_and_specified():
    """Constructor rejects two contexts that are both required and specified."""
    first = Context(name=CONTEXT1, help=CONTEXT_HELP_STRING1)
    second = Context(name=CONTEXT2, help=CONTEXT_HELP_STRING2)
    third = Context(name=CONTEXT3, help=CONTEXT_HELP_STRING3)
    expected_message = (
        "^required and specified contexts clash "
        rf"\({COMMAND1} -> required/specified\)$"
    )
    with pytest.raises(InconsistentConfiguration, match=expected_message):
        CommandBuilder(
            name=COMMAND1,
            help=COMMAND_HELP_STRING1,
            required_contexts=[first, second, third],
            contexts_specifications={
                first: ContextSpecification(args=[ARG1]),
                second: ContextSpecification(args=[ARG2]),
            },
        )
def test_command_builder_fail_on_set_required_context_fail_on_preoccupied():
    """Assigning required_contexts must fail when the context is already allowed."""
    context = Context(name=CONTEXT1, help=CONTEXT_HELP_STRING1)
    builder = CommandBuilder(
        name=COMMAND1, help=COMMAND_HELP_STRING1, allowed_contexts=[context]
    )
    expected_message = (
        "^allowed and required contexts clash "
        rf"\({COMMAND1} -> allowed/required -> {CONTEXT1}\)$"
    )
    with pytest.raises(InconsistentConfiguration, match=expected_message):
        builder.required_contexts = [context]
def test_command_builder_fail_on_set_allowed_context_fail_on_preoccupied():
    """Assigning allowed_contexts must fail when the context is already required."""
    context = Context(name=CONTEXT1, help=CONTEXT_HELP_STRING1)
    builder = CommandBuilder(
        name=COMMAND1, help=COMMAND_HELP_STRING1, required_contexts=[context]
    )
    expected_message = (
        "^allowed and required contexts clash "
        rf"\({COMMAND1} -> allowed/required -> {CONTEXT1}\)$"
    )
    with pytest.raises(InconsistentConfiguration, match=expected_message):
        builder.allowed_contexts = [context]
def test_command_builder_fail_on_set_specified_context_fail_on_preoccupied():
    """Assigning contexts_specifications must fail for an already-required context."""
    context = Context(name=CONTEXT1, help=CONTEXT_HELP_STRING1)
    builder = CommandBuilder(
        name=COMMAND1, help=COMMAND_HELP_STRING1, required_contexts=[context]
    )
    expected_message = (
        "^required and specified contexts clash "
        rf"\({COMMAND1} -> required/specified -> {CONTEXT1}\)$"
    )
    with pytest.raises(InconsistentConfiguration, match=expected_message):
        builder.contexts_specifications = {
            context: ContextSpecification(clear_args=True)
        }
| 34.384335
| 88
| 0.678815
| 1,821
| 18,877
| 6.720483
| 0.041735
| 0.145285
| 0.083347
| 0.061121
| 0.92368
| 0.909381
| 0.867217
| 0.841477
| 0.819333
| 0.783461
| 0
| 0.025634
| 0.227102
| 18,877
| 548
| 89
| 34.44708
| 0.81316
| 0
| 0
| 0.731557
| 0
| 0
| 0.119193
| 0.044181
| 0
| 0
| 0
| 0
| 0.17418
| 1
| 0.040984
| false
| 0
| 0.014344
| 0
| 0.055328
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d2c39e0d2756754565318e2c81a6764e4a97d4a1
| 3,928
|
py
|
Python
|
tests/unit/dataactvalidator/test_a35_cross_file.py
|
chambers-brian/SIG_Digital-Strategy_SI_ODP_Backend
|
3de8cedf69d5a0c9fad8239734bd6291cf583936
|
[
"CC0-1.0"
] | null | null | null |
tests/unit/dataactvalidator/test_a35_cross_file.py
|
chambers-brian/SIG_Digital-Strategy_SI_ODP_Backend
|
3de8cedf69d5a0c9fad8239734bd6291cf583936
|
[
"CC0-1.0"
] | null | null | null |
tests/unit/dataactvalidator/test_a35_cross_file.py
|
chambers-brian/SIG_Digital-Strategy_SI_ODP_Backend
|
3de8cedf69d5a0c9fad8239734bd6291cf583936
|
[
"CC0-1.0"
] | null | null | null |
from tests.unit.dataactcore.factories.staging import AppropriationFactory, ObjectClassProgramActivityFactory
from tests.unit.dataactvalidator.utils import number_of_errors, query_columns
_FILE = 'a35_cross_file'
_TAS = 'a35_cross_file_tas'
def test_column_headers(database):
    """The validation query must expose at least the expected columns."""
    expected_subset = {
        'row_number',
        'deobligations_recoveries_r_cpe',
        'ussgl487100_downward_adjus_cpe_sum',
        'ussgl497100_downward_adjus_cpe_sum',
        'ussgl487200_downward_adjus_cpe_sum',
        'ussgl497200_downward_adjus_cpe_sum',
    }
    actual = set(query_columns(_FILE, database))
    assert expected_subset <= actual
def test_success(database):
    """Rule passes when, for rows with a matching TAS, the Appropriation's
    deobligations_recoveries_r_cpe equals the sum over Object Class Program
    Activity rows of ussgl487100/497100/487200/497200_downward_adjus_cpe.
    Rows with a different TAS must be ignored by the rule.
    """
    # Fixed: was ''.join([_TAS + "_ignore"]) — a no-op join of a one-element list.
    tas_ignore = _TAS + "_ignore"

    # Two matching rows contribute 4 each, summing to the appropriation's 8.
    ap = AppropriationFactory(tas=_TAS, deobligations_recoveries_r_cpe=8)
    op_1 = ObjectClassProgramActivityFactory(
        tas=_TAS, ussgl487100_downward_adjus_cpe=1, ussgl497100_downward_adjus_cpe=1,
        ussgl487200_downward_adjus_cpe=1, ussgl497200_downward_adjus_cpe=1)
    op_2 = ObjectClassProgramActivityFactory(
        tas=_TAS, ussgl487100_downward_adjus_cpe=1, ussgl497100_downward_adjus_cpe=1,
        ussgl487200_downward_adjus_cpe=1, ussgl497200_downward_adjus_cpe=1)
    assert number_of_errors(_FILE, database, models=[ap, op_1, op_2]) == 0

    # An additional row with a non-matching TAS must not change the outcome.
    ap = AppropriationFactory(tas=_TAS, deobligations_recoveries_r_cpe=8)
    op_1 = ObjectClassProgramActivityFactory(
        tas=_TAS, ussgl487100_downward_adjus_cpe=1, ussgl497100_downward_adjus_cpe=1,
        ussgl487200_downward_adjus_cpe=1, ussgl497200_downward_adjus_cpe=1)
    op_2 = ObjectClassProgramActivityFactory(
        tas=_TAS, ussgl487100_downward_adjus_cpe=1, ussgl497100_downward_adjus_cpe=1,
        ussgl487200_downward_adjus_cpe=1, ussgl497200_downward_adjus_cpe=1)
    op_3 = ObjectClassProgramActivityFactory(
        tas=tas_ignore, ussgl487100_downward_adjus_cpe=1, ussgl497100_downward_adjus_cpe=1,
        ussgl487200_downward_adjus_cpe=1, ussgl497200_downward_adjus_cpe=1)
    assert number_of_errors(_FILE, database, models=[ap, op_1, op_2, op_3]) == 0
def test_failure(database):
    """Rule fails when, for rows with a matching TAS, the Appropriation's
    deobligations_recoveries_r_cpe does not equal the sum over Object Class
    Program Activity rows of the four ussgl downward-adjustment fields.
    The non-matching-TAS row must still be ignored.
    """
    # Fixed: was ''.join([_TAS + "_ignore"]) — a no-op join of a one-element list.
    tas_ignore = _TAS + "_ignore"

    # Matching rows sum to 8, but the appropriation reports 7 -> one error.
    ap = AppropriationFactory(tas=_TAS, deobligations_recoveries_r_cpe=7)
    op_1 = ObjectClassProgramActivityFactory(
        tas=_TAS, ussgl487100_downward_adjus_cpe=1,
        ussgl497100_downward_adjus_cpe=1,
        ussgl487200_downward_adjus_cpe=1, ussgl497200_downward_adjus_cpe=1)
    op_2 = ObjectClassProgramActivityFactory(
        tas=_TAS, ussgl487100_downward_adjus_cpe=1,
        ussgl497100_downward_adjus_cpe=1,
        ussgl487200_downward_adjus_cpe=1, ussgl497200_downward_adjus_cpe=1)
    op_3 = ObjectClassProgramActivityFactory(
        tas=tas_ignore, ussgl487100_downward_adjus_cpe=1, ussgl497100_downward_adjus_cpe=1,
        ussgl487200_downward_adjus_cpe=1, ussgl497200_downward_adjus_cpe=1)
    assert number_of_errors(_FILE, database, models=[ap, op_1, op_2, op_3]) == 1
| 56.927536
| 128
| 0.727597
| 450
| 3,928
| 5.871111
| 0.16
| 0.216503
| 0.266465
| 0.205905
| 0.807343
| 0.807343
| 0.807343
| 0.807343
| 0.807343
| 0.807343
| 0
| 0.10367
| 0.209267
| 3,928
| 68
| 129
| 57.764706
| 0.746941
| 0.157841
| 0
| 0.526316
| 0
| 0
| 0.067704
| 0.050625
| 0
| 0
| 0
| 0
| 0.105263
| 1
| 0.078947
| false
| 0
| 0.052632
| 0
| 0.131579
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
d2f5012146f10088ed49c2b113ab94e05725728d
| 65
|
py
|
Python
|
vit_pytorch/__init__.py
|
JiaJiunn/vit-pytorch
|
7bfaa4c192a92b5663ffbd8466f6e0bd7abba05b
|
[
"MIT"
] | null | null | null |
vit_pytorch/__init__.py
|
JiaJiunn/vit-pytorch
|
7bfaa4c192a92b5663ffbd8466f6e0bd7abba05b
|
[
"MIT"
] | null | null | null |
vit_pytorch/__init__.py
|
JiaJiunn/vit-pytorch
|
7bfaa4c192a92b5663ffbd8466f6e0bd7abba05b
|
[
"MIT"
] | null | null | null |
from vit_pytorch.vit import ViT
from vit_pytorch.dino import Dino
| 32.5
| 33
| 0.861538
| 12
| 65
| 4.5
| 0.416667
| 0.259259
| 0.518519
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107692
| 65
| 2
| 33
| 32.5
| 0.931034
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
9602de41e131f1275a0d9bf87bb9594be7a83e4e
| 533,708
|
py
|
Python
|
libcst/matchers/__init__.py
|
jschavesr/LibCST
|
e5ab7b90b4c9cd1f46e5b875ad317411abf48298
|
[
"Apache-2.0"
] | 1
|
2022-02-10T10:59:22.000Z
|
2022-02-10T10:59:22.000Z
|
libcst/matchers/__init__.py
|
jschavesr/LibCST
|
e5ab7b90b4c9cd1f46e5b875ad317411abf48298
|
[
"Apache-2.0"
] | null | null | null |
libcst/matchers/__init__.py
|
jschavesr/LibCST
|
e5ab7b90b4c9cd1f46e5b875ad317411abf48298
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
# This file was generated by libcst.codegen.gen_matcher_classes
from dataclasses import dataclass
from typing import Optional, Sequence, Union
from typing_extensions import Literal
import libcst as cst
from libcst.matchers._decorators import call_if_inside, call_if_not_inside, leave, visit
from libcst.matchers._matcher_base import (
AbstractBaseMatcherNodeMeta,
AllOf,
AtLeastN,
AtMostN,
BaseMatcherNode,
DoesNotMatch,
DoNotCare,
DoNotCareSentinel,
extract,
extractall,
findall,
matches,
MatchIfTrue,
MatchMetadata,
MatchMetadataIfTrue,
MatchRegex,
OneOf,
replace,
SaveMatchedNode,
TypeOf,
ZeroOrMore,
ZeroOrOne,
)
from libcst.matchers._visitors import (
MatchDecoratorMismatch,
MatcherDecoratableTransformer,
MatcherDecoratableVisitor,
)
class _NodeABC(metaclass=AbstractBaseMatcherNodeMeta):
    # Common abstract base for every matcher category below; the metaclass
    # supplies the abstract-base matching behavior. No instance state.
    __slots__ = ()
# Empty marker categories mirroring libcst's abstract node classes. Concrete
# matcher classes (generated below) list the categories they belong to as
# bases, which is how category-level matching is expressed.
class BaseAssignTargetExpression(_NodeABC):
    pass


class BaseAugOp(_NodeABC):
    pass


class BaseBinaryOp(_NodeABC):
    pass


class BaseBooleanOp(_NodeABC):
    pass


class BaseComp(_NodeABC):
    pass


class BaseCompOp(_NodeABC):
    pass


class BaseCompoundStatement(_NodeABC):
    pass


class BaseDelTargetExpression(_NodeABC):
    pass


class BaseDict(_NodeABC):
    pass


class BaseDictElement(_NodeABC):
    pass


class BaseElement(_NodeABC):
    pass


class BaseExpression(_NodeABC):
    pass


class BaseFormattedStringContent(_NodeABC):
    pass


class BaseList(_NodeABC):
    pass


class BaseMetadataProvider(_NodeABC):
    pass


class BaseNumber(_NodeABC):
    pass


class BaseParenthesizableWhitespace(_NodeABC):
    pass


class BaseSet(_NodeABC):
    pass


class BaseSimpleComp(_NodeABC):
    pass


class BaseSlice(_NodeABC):
    pass


class BaseSmallStatement(_NodeABC):
    pass


class BaseStatement(_NodeABC):
    pass


class BaseString(_NodeABC):
    pass


class BaseSuite(_NodeABC):
    pass


class BaseUnaryOp(_NodeABC):
    pass
# "Matches via metadata": either an exact MatchMetadata or a predicate match.
MetadataMatchType = Union[MatchMetadata, MatchMetadataIfTrue]
# Anything accepted where a BaseParenthesizableWhitespace is matched: the
# matcher category, a metadata match, or a predicate over the concrete node.
BaseParenthesizableWhitespaceMatchType = Union[
    "BaseParenthesizableWhitespace",
    MetadataMatchType,
    MatchIfTrue[cst.BaseParenthesizableWhitespace],
]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Add(BaseBinaryOp, BaseMatcherNode):
    """Matcher for a ``cst.Add`` binary operator; fields default to DoNotCare()."""

    # NOTE: field order is the generated positional interface — do not reorder.
    whitespace_before: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    whitespace_after: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class AddAssign(BaseAugOp, BaseMatcherNode):
    """Matcher for a ``cst.AddAssign`` (``+=``) augmented-assignment operator."""

    whitespace_before: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    whitespace_after: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class And(BaseBooleanOp, BaseMatcherNode):
    """Matcher for a ``cst.And`` boolean operator; fields default to DoNotCare()."""

    whitespace_before: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    whitespace_after: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
# Per-node "anything that matches an X" shorthands: the matcher class (forward
# reference), a metadata match, or a predicate over the concrete cst node.
BaseAssignTargetExpressionMatchType = Union[
    "BaseAssignTargetExpression",
    MetadataMatchType,
    MatchIfTrue[cst.BaseAssignTargetExpression],
]
AnnotationMatchType = Union[
    "Annotation", MetadataMatchType, MatchIfTrue[cst.Annotation]
]
AssignEqualMatchType = Union[
    "AssignEqual", MetadataMatchType, MatchIfTrue[cst.AssignEqual]
]
SemicolonMatchType = Union["Semicolon", MetadataMatchType, MatchIfTrue[cst.Semicolon]]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class AnnAssign(BaseSmallStatement, BaseMatcherNode):
    """Matcher for a ``cst.AnnAssign`` annotated assignment statement."""

    target: Union[
        BaseAssignTargetExpressionMatchType,
        DoNotCareSentinel,
        OneOf[BaseAssignTargetExpressionMatchType],
        AllOf[BaseAssignTargetExpressionMatchType],
    ] = DoNotCare()
    annotation: Union[
        AnnotationMatchType,
        DoNotCareSentinel,
        OneOf[AnnotationMatchType],
        AllOf[AnnotationMatchType],
    ] = DoNotCare()
    # The assigned value is optional on the cst node, hence the Optional forms.
    value: Union[
        Optional["BaseExpression"],
        MetadataMatchType,
        MatchIfTrue[Optional[cst.BaseExpression]],
        DoNotCareSentinel,
        OneOf[
            Union[
                Optional["BaseExpression"],
                MetadataMatchType,
                MatchIfTrue[Optional[cst.BaseExpression]],
            ]
        ],
        AllOf[
            Union[
                Optional["BaseExpression"],
                MetadataMatchType,
                MatchIfTrue[Optional[cst.BaseExpression]],
            ]
        ],
    ] = DoNotCare()
    equal: Union[
        AssignEqualMatchType,
        DoNotCareSentinel,
        OneOf[AssignEqualMatchType],
        AllOf[AssignEqualMatchType],
    ] = DoNotCare()
    semicolon: Union[
        SemicolonMatchType,
        DoNotCareSentinel,
        OneOf[SemicolonMatchType],
        AllOf[SemicolonMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
# Anything accepted where a BaseExpression is matched.
BaseExpressionMatchType = Union[
    "BaseExpression", MetadataMatchType, MatchIfTrue[cst.BaseExpression]
]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Annotation(BaseMatcherNode):
    """Matcher for a ``cst.Annotation`` node (the ``: type`` part)."""

    annotation: Union[
        BaseExpressionMatchType,
        DoNotCareSentinel,
        OneOf[BaseExpressionMatchType],
        AllOf[BaseExpressionMatchType],
    ] = DoNotCare()
    whitespace_before_indicator: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    whitespace_after_indicator: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
# Anything accepted where a Comma is matched.
CommaMatchType = Union["Comma", MetadataMatchType, MatchIfTrue[cst.Comma]]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Arg(BaseMatcherNode):
    """Matcher for a ``cst.Arg`` call-argument node."""

    value: Union[
        BaseExpressionMatchType,
        DoNotCareSentinel,
        OneOf[BaseExpressionMatchType],
        AllOf[BaseExpressionMatchType],
    ] = DoNotCare()
    # Keyword is optional on the cst node (positional args have none).
    keyword: Union[
        Optional["Name"],
        MetadataMatchType,
        MatchIfTrue[Optional[cst.Name]],
        DoNotCareSentinel,
        OneOf[
            Union[Optional["Name"], MetadataMatchType, MatchIfTrue[Optional[cst.Name]]]
        ],
        AllOf[
            Union[Optional["Name"], MetadataMatchType, MatchIfTrue[Optional[cst.Name]]]
        ],
    ] = DoNotCare()
    equal: Union[
        AssignEqualMatchType,
        DoNotCareSentinel,
        OneOf[AssignEqualMatchType],
        AllOf[AssignEqualMatchType],
    ] = DoNotCare()
    comma: Union[
        CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType]
    ] = DoNotCare()
    # "" = plain arg, "*" = iterable unpack, "**" = mapping unpack.
    star: Union[
        Literal["", "*", "**"],
        MetadataMatchType,
        MatchIfTrue[Literal["", "*", "**"]],
        DoNotCareSentinel,
        OneOf[
            Union[
                Literal["", "*", "**"],
                MetadataMatchType,
                MatchIfTrue[Literal["", "*", "**"]],
            ]
        ],
        AllOf[
            Union[
                Literal["", "*", "**"],
                MetadataMatchType,
                MatchIfTrue[Literal["", "*", "**"]],
            ]
        ],
    ] = DoNotCare()
    whitespace_after_star: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    whitespace_after_arg: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
# Anything accepted where a Name, Tuple, or List is matched.
NameOrTupleOrListMatchType = Union[
    "Name",
    "Tuple",
    "List",
    MetadataMatchType,
    MatchIfTrue[Union[cst.Name, cst.Tuple, cst.List]],
]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class AsName(BaseMatcherNode):
    """Matcher for a ``cst.AsName`` node (the ``as name`` clause)."""

    name: Union[
        NameOrTupleOrListMatchType,
        DoNotCareSentinel,
        OneOf[NameOrTupleOrListMatchType],
        AllOf[NameOrTupleOrListMatchType],
    ] = DoNotCare()
    whitespace_before_as: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    whitespace_after_as: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
# Anything accepted where a SimpleWhitespace is matched.
SimpleWhitespaceMatchType = Union[
    "SimpleWhitespace", MetadataMatchType, MatchIfTrue[cst.SimpleWhitespace]
]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Assert(BaseSmallStatement, BaseMatcherNode):
    """Matcher for a ``cst.Assert`` statement."""

    test: Union[
        BaseExpressionMatchType,
        DoNotCareSentinel,
        OneOf[BaseExpressionMatchType],
        AllOf[BaseExpressionMatchType],
    ] = DoNotCare()
    # The assertion message is optional on the cst node.
    msg: Union[
        Optional["BaseExpression"],
        MetadataMatchType,
        MatchIfTrue[Optional[cst.BaseExpression]],
        DoNotCareSentinel,
        OneOf[
            Union[
                Optional["BaseExpression"],
                MetadataMatchType,
                MatchIfTrue[Optional[cst.BaseExpression]],
            ]
        ],
        AllOf[
            Union[
                Optional["BaseExpression"],
                MetadataMatchType,
                MatchIfTrue[Optional[cst.BaseExpression]],
            ]
        ],
    ] = DoNotCare()
    comma: Union[
        CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType]
    ] = DoNotCare()
    whitespace_after_assert: Union[
        SimpleWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[SimpleWhitespaceMatchType],
        AllOf[SimpleWhitespaceMatchType],
    ] = DoNotCare()
    semicolon: Union[
        SemicolonMatchType,
        DoNotCareSentinel,
        OneOf[SemicolonMatchType],
        AllOf[SemicolonMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
# Anything accepted where an AssignTarget is matched.
AssignTargetMatchType = Union[
    "AssignTarget", MetadataMatchType, MatchIfTrue[cst.AssignTarget]
]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Assign(BaseSmallStatement, BaseMatcherNode):
    """Matcher for a ``cst.Assign`` statement."""

    # Sequence field: accepts a literal sequence of per-element matchers
    # (including AtLeastN/AtMostN wildcards), a predicate over the whole
    # sequence, or OneOf/AllOf combinations of either form.
    targets: Union[
        Sequence[
            Union[
                AssignTargetMatchType,
                DoNotCareSentinel,
                OneOf[AssignTargetMatchType],
                AllOf[AssignTargetMatchType],
                AtLeastN[
                    Union[
                        AssignTargetMatchType,
                        DoNotCareSentinel,
                        OneOf[AssignTargetMatchType],
                        AllOf[AssignTargetMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        AssignTargetMatchType,
                        DoNotCareSentinel,
                        OneOf[AssignTargetMatchType],
                        AllOf[AssignTargetMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.AssignTarget]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        AssignTargetMatchType,
                        OneOf[AssignTargetMatchType],
                        AllOf[AssignTargetMatchType],
                        AtLeastN[
                            Union[
                                AssignTargetMatchType,
                                OneOf[AssignTargetMatchType],
                                AllOf[AssignTargetMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                AssignTargetMatchType,
                                OneOf[AssignTargetMatchType],
                                AllOf[AssignTargetMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.AssignTarget]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        AssignTargetMatchType,
                        OneOf[AssignTargetMatchType],
                        AllOf[AssignTargetMatchType],
                        AtLeastN[
                            Union[
                                AssignTargetMatchType,
                                OneOf[AssignTargetMatchType],
                                AllOf[AssignTargetMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                AssignTargetMatchType,
                                OneOf[AssignTargetMatchType],
                                AllOf[AssignTargetMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.AssignTarget]],
            ]
        ],
    ] = DoNotCare()
    value: Union[
        BaseExpressionMatchType,
        DoNotCareSentinel,
        OneOf[BaseExpressionMatchType],
        AllOf[BaseExpressionMatchType],
    ] = DoNotCare()
    semicolon: Union[
        SemicolonMatchType,
        DoNotCareSentinel,
        OneOf[SemicolonMatchType],
        AllOf[SemicolonMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class AssignEqual(BaseMatcherNode):
    """Matcher for a ``cst.AssignEqual`` node (the ``=`` token with whitespace)."""

    whitespace_before: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    whitespace_after: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class AssignTarget(BaseMatcherNode):
    """Matcher for a ``cst.AssignTarget`` node (one ``target =`` segment)."""

    target: Union[
        BaseAssignTargetExpressionMatchType,
        DoNotCareSentinel,
        OneOf[BaseAssignTargetExpressionMatchType],
        AllOf[BaseAssignTargetExpressionMatchType],
    ] = DoNotCare()
    whitespace_before_equal: Union[
        SimpleWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[SimpleWhitespaceMatchType],
        AllOf[SimpleWhitespaceMatchType],
    ] = DoNotCare()
    whitespace_after_equal: Union[
        SimpleWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[SimpleWhitespaceMatchType],
        AllOf[SimpleWhitespaceMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Asynchronous(BaseMatcherNode):
    """Matcher for a ``cst.Asynchronous`` node (the ``async`` keyword)."""

    whitespace_after: Union[
        SimpleWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[SimpleWhitespaceMatchType],
        AllOf[SimpleWhitespaceMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
# Per-node match shorthands used by the classes below.
NameMatchType = Union["Name", MetadataMatchType, MatchIfTrue[cst.Name]]
DotMatchType = Union["Dot", MetadataMatchType, MatchIfTrue[cst.Dot]]
LeftParenMatchType = Union["LeftParen", MetadataMatchType, MatchIfTrue[cst.LeftParen]]
RightParenMatchType = Union[
    "RightParen", MetadataMatchType, MatchIfTrue[cst.RightParen]
]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Attribute(
    BaseAssignTargetExpression, BaseDelTargetExpression, BaseExpression, BaseMatcherNode
):
    """Matcher for a ``cst.Attribute`` node (``value.attr``)."""

    value: Union[
        BaseExpressionMatchType,
        DoNotCareSentinel,
        OneOf[BaseExpressionMatchType],
        AllOf[BaseExpressionMatchType],
    ] = DoNotCare()
    attr: Union[
        NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType]
    ] = DoNotCare()
    dot: Union[
        DotMatchType, DoNotCareSentinel, OneOf[DotMatchType], AllOf[DotMatchType]
    ] = DoNotCare()
    # Sequence fields lpar/rpar accept per-element matchers (with
    # AtLeastN/AtMostN wildcards), a whole-sequence predicate, or
    # OneOf/AllOf combinations of either form.
    lpar: Union[
        Sequence[
            Union[
                LeftParenMatchType,
                DoNotCareSentinel,
                OneOf[LeftParenMatchType],
                AllOf[LeftParenMatchType],
                AtLeastN[
                    Union[
                        LeftParenMatchType,
                        DoNotCareSentinel,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        LeftParenMatchType,
                        DoNotCareSentinel,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.LeftParen]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        LeftParenMatchType,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                        AtLeastN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.LeftParen]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        LeftParenMatchType,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                        AtLeastN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.LeftParen]],
            ]
        ],
    ] = DoNotCare()
    rpar: Union[
        Sequence[
            Union[
                RightParenMatchType,
                DoNotCareSentinel,
                OneOf[RightParenMatchType],
                AllOf[RightParenMatchType],
                AtLeastN[
                    Union[
                        RightParenMatchType,
                        DoNotCareSentinel,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        RightParenMatchType,
                        DoNotCareSentinel,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.RightParen]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        RightParenMatchType,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                        AtLeastN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.RightParen]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        RightParenMatchType,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                        AtLeastN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.RightParen]],
            ]
        ],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
# Anything accepted where a BaseAugOp is matched.
BaseAugOpMatchType = Union["BaseAugOp", MetadataMatchType, MatchIfTrue[cst.BaseAugOp]]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class AugAssign(BaseSmallStatement, BaseMatcherNode):
    """Matcher for a ``cst.AugAssign`` augmented-assignment statement."""

    target: Union[
        BaseAssignTargetExpressionMatchType,
        DoNotCareSentinel,
        OneOf[BaseAssignTargetExpressionMatchType],
        AllOf[BaseAssignTargetExpressionMatchType],
    ] = DoNotCare()
    operator: Union[
        BaseAugOpMatchType,
        DoNotCareSentinel,
        OneOf[BaseAugOpMatchType],
        AllOf[BaseAugOpMatchType],
    ] = DoNotCare()
    value: Union[
        BaseExpressionMatchType,
        DoNotCareSentinel,
        OneOf[BaseExpressionMatchType],
        AllOf[BaseExpressionMatchType],
    ] = DoNotCare()
    semicolon: Union[
        SemicolonMatchType,
        DoNotCareSentinel,
        OneOf[SemicolonMatchType],
        AllOf[SemicolonMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Await(BaseExpression, BaseMatcherNode):
    """Matcher for a ``cst.Await`` expression node."""

    expression: Union[
        BaseExpressionMatchType,
        DoNotCareSentinel,
        OneOf[BaseExpressionMatchType],
        AllOf[BaseExpressionMatchType],
    ] = DoNotCare()
    # Sequence fields lpar/rpar: see Attribute for the shape of these unions.
    lpar: Union[
        Sequence[
            Union[
                LeftParenMatchType,
                DoNotCareSentinel,
                OneOf[LeftParenMatchType],
                AllOf[LeftParenMatchType],
                AtLeastN[
                    Union[
                        LeftParenMatchType,
                        DoNotCareSentinel,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        LeftParenMatchType,
                        DoNotCareSentinel,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.LeftParen]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        LeftParenMatchType,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                        AtLeastN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.LeftParen]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        LeftParenMatchType,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                        AtLeastN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.LeftParen]],
            ]
        ],
    ] = DoNotCare()
    rpar: Union[
        Sequence[
            Union[
                RightParenMatchType,
                DoNotCareSentinel,
                OneOf[RightParenMatchType],
                AllOf[RightParenMatchType],
                AtLeastN[
                    Union[
                        RightParenMatchType,
                        DoNotCareSentinel,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        RightParenMatchType,
                        DoNotCareSentinel,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.RightParen]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        RightParenMatchType,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                        AtLeastN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.RightParen]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        RightParenMatchType,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                        AtLeastN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.RightParen]],
            ]
        ],
    ] = DoNotCare()
    whitespace_after_await: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
# Anything accepted where a BaseBinaryOp is matched.
BaseBinaryOpMatchType = Union[
    "BaseBinaryOp", MetadataMatchType, MatchIfTrue[cst.BaseBinaryOp]
]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class BinaryOperation(BaseExpression, BaseMatcherNode):
    """Matcher for a ``cst.BinaryOperation`` node (``left operator right``)."""

    left: Union[
        BaseExpressionMatchType,
        DoNotCareSentinel,
        OneOf[BaseExpressionMatchType],
        AllOf[BaseExpressionMatchType],
    ] = DoNotCare()
    operator: Union[
        BaseBinaryOpMatchType,
        DoNotCareSentinel,
        OneOf[BaseBinaryOpMatchType],
        AllOf[BaseBinaryOpMatchType],
    ] = DoNotCare()
    right: Union[
        BaseExpressionMatchType,
        DoNotCareSentinel,
        OneOf[BaseExpressionMatchType],
        AllOf[BaseExpressionMatchType],
    ] = DoNotCare()
    # Sequence fields lpar/rpar: see Attribute for the shape of these unions.
    lpar: Union[
        Sequence[
            Union[
                LeftParenMatchType,
                DoNotCareSentinel,
                OneOf[LeftParenMatchType],
                AllOf[LeftParenMatchType],
                AtLeastN[
                    Union[
                        LeftParenMatchType,
                        DoNotCareSentinel,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        LeftParenMatchType,
                        DoNotCareSentinel,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.LeftParen]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        LeftParenMatchType,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                        AtLeastN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.LeftParen]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        LeftParenMatchType,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                        AtLeastN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.LeftParen]],
            ]
        ],
    ] = DoNotCare()
    rpar: Union[
        Sequence[
            Union[
                RightParenMatchType,
                DoNotCareSentinel,
                OneOf[RightParenMatchType],
                AllOf[RightParenMatchType],
                AtLeastN[
                    Union[
                        RightParenMatchType,
                        DoNotCareSentinel,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        RightParenMatchType,
                        DoNotCareSentinel,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.RightParen]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        RightParenMatchType,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                        AtLeastN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.RightParen]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        RightParenMatchType,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                        AtLeastN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.RightParen]],
            ]
        ],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class BitAnd(BaseBinaryOp, BaseMatcherNode):
    """Matcher for ``cst.BitAnd`` binary-operator nodes."""

    # Whitespace on either side of the operator token; DoNotCare() matches any.
    whitespace_before: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    whitespace_after: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    # Match against resolved metadata for the node.
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class BitAndAssign(BaseAugOp, BaseMatcherNode):
    """Matcher for ``cst.BitAndAssign`` augmented-assignment operator nodes."""

    # Whitespace on either side of the operator token; DoNotCare() matches any.
    whitespace_before: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    whitespace_after: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    # Match against resolved metadata for the node.
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class BitInvert(BaseUnaryOp, BaseMatcherNode):
    """Matcher for ``cst.BitInvert`` unary-operator nodes."""

    # Whitespace between the operator token and its operand.
    whitespace_after: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    # Match against resolved metadata for the node.
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class BitOr(BaseBinaryOp, BaseMatcherNode):
    """Matcher for ``cst.BitOr`` binary-operator nodes."""

    # Whitespace on either side of the operator token; DoNotCare() matches any.
    whitespace_before: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    whitespace_after: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    # Match against resolved metadata for the node.
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class BitOrAssign(BaseAugOp, BaseMatcherNode):
    """Matcher for ``cst.BitOrAssign`` augmented-assignment operator nodes."""

    # Whitespace on either side of the operator token; DoNotCare() matches any.
    whitespace_before: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    whitespace_after: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    # Match against resolved metadata for the node.
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class BitXor(BaseBinaryOp, BaseMatcherNode):
    """Matcher for ``cst.BitXor`` binary-operator nodes."""

    # Whitespace on either side of the operator token; DoNotCare() matches any.
    whitespace_before: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    whitespace_after: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    # Match against resolved metadata for the node.
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class BitXorAssign(BaseAugOp, BaseMatcherNode):
    """Matcher for ``cst.BitXorAssign`` augmented-assignment operator nodes."""

    # Whitespace on either side of the operator token; DoNotCare() matches any.
    whitespace_before: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    whitespace_after: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    # Match against resolved metadata for the node.
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
# Any matcher form acceptable where a cst.BaseBooleanOp is expected.
BaseBooleanOpMatchType = Union[
    "BaseBooleanOp", MetadataMatchType, MatchIfTrue[cst.BaseBooleanOp]
]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class BooleanOperation(BaseExpression, BaseMatcherNode):
    """Matcher for ``cst.BooleanOperation`` nodes (left, operator, right)."""

    # Left-hand operand expression.
    left: Union[BaseExpressionMatchType, DoNotCareSentinel, OneOf[BaseExpressionMatchType], AllOf[BaseExpressionMatchType]] = DoNotCare()
    # The boolean operator between the operands.
    operator: Union[BaseBooleanOpMatchType, DoNotCareSentinel, OneOf[BaseBooleanOpMatchType], AllOf[BaseBooleanOpMatchType]] = DoNotCare()
    # Right-hand operand expression.
    right: Union[BaseExpressionMatchType, DoNotCareSentinel, OneOf[BaseExpressionMatchType], AllOf[BaseExpressionMatchType]] = DoNotCare()
    # Sequence of wrapping left parentheses; supports AtLeastN/AtMostN wildcards.
    lpar: Union[
        Sequence[Union[LeftParenMatchType, DoNotCareSentinel, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType], AtLeastN[Union[LeftParenMatchType, DoNotCareSentinel, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]], AtMostN[Union[LeftParenMatchType, DoNotCareSentinel, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]]]],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.LeftParen]],
        OneOf[Union[Sequence[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType], AtLeastN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]], AtMostN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]]]], MatchIfTrue[Sequence[cst.LeftParen]]]],
        AllOf[Union[Sequence[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType], AtLeastN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]], AtMostN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]]]], MatchIfTrue[Sequence[cst.LeftParen]]]],
    ] = DoNotCare()
    # Sequence of wrapping right parentheses; mirrors lpar.
    rpar: Union[
        Sequence[Union[RightParenMatchType, DoNotCareSentinel, OneOf[RightParenMatchType], AllOf[RightParenMatchType], AtLeastN[Union[RightParenMatchType, DoNotCareSentinel, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]], AtMostN[Union[RightParenMatchType, DoNotCareSentinel, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]]]],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.RightParen]],
        OneOf[Union[Sequence[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType], AtLeastN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]], AtMostN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]]]], MatchIfTrue[Sequence[cst.RightParen]]]],
        AllOf[Union[Sequence[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType], AtLeastN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]], AtMostN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]]]], MatchIfTrue[Sequence[cst.RightParen]]]],
    ] = DoNotCare()
    # Match against resolved metadata for the node.
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Break(BaseSmallStatement, BaseMatcherNode):
    """Matcher for ``cst.Break`` statement nodes."""

    # Optional trailing semicolon on the statement.
    semicolon: Union[SemicolonMatchType, DoNotCareSentinel, OneOf[SemicolonMatchType], AllOf[SemicolonMatchType]] = DoNotCare()
    # Match against resolved metadata for the node.
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
# Any matcher form acceptable where a cst.Arg is expected.
ArgMatchType = Union["Arg", MetadataMatchType, MatchIfTrue[cst.Arg]]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Call(BaseExpression, BaseMatcherNode):
    """Matcher for ``cst.Call`` nodes."""

    # The expression being called.
    func: Union[BaseExpressionMatchType, DoNotCareSentinel, OneOf[BaseExpressionMatchType], AllOf[BaseExpressionMatchType]] = DoNotCare()
    # Sequence of call arguments; supports AtLeastN/AtMostN wildcards.
    args: Union[
        Sequence[Union[ArgMatchType, DoNotCareSentinel, OneOf[ArgMatchType], AllOf[ArgMatchType], AtLeastN[Union[ArgMatchType, DoNotCareSentinel, OneOf[ArgMatchType], AllOf[ArgMatchType]]], AtMostN[Union[ArgMatchType, DoNotCareSentinel, OneOf[ArgMatchType], AllOf[ArgMatchType]]]]],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.Arg]],
        OneOf[Union[Sequence[Union[ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType], AtLeastN[Union[ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType]]], AtMostN[Union[ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType]]]]], MatchIfTrue[Sequence[cst.Arg]]]],
        AllOf[Union[Sequence[Union[ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType], AtLeastN[Union[ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType]]], AtMostN[Union[ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType]]]]], MatchIfTrue[Sequence[cst.Arg]]]],
    ] = DoNotCare()
    # Sequence of wrapping left parentheses; supports AtLeastN/AtMostN wildcards.
    lpar: Union[
        Sequence[Union[LeftParenMatchType, DoNotCareSentinel, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType], AtLeastN[Union[LeftParenMatchType, DoNotCareSentinel, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]], AtMostN[Union[LeftParenMatchType, DoNotCareSentinel, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]]]],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.LeftParen]],
        OneOf[Union[Sequence[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType], AtLeastN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]], AtMostN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]]]], MatchIfTrue[Sequence[cst.LeftParen]]]],
        AllOf[Union[Sequence[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType], AtLeastN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]], AtMostN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]]]], MatchIfTrue[Sequence[cst.LeftParen]]]],
    ] = DoNotCare()
    # Sequence of wrapping right parentheses; mirrors lpar.
    rpar: Union[
        Sequence[Union[RightParenMatchType, DoNotCareSentinel, OneOf[RightParenMatchType], AllOf[RightParenMatchType], AtLeastN[Union[RightParenMatchType, DoNotCareSentinel, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]], AtMostN[Union[RightParenMatchType, DoNotCareSentinel, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]]]],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.RightParen]],
        OneOf[Union[Sequence[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType], AtLeastN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]], AtMostN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]]]], MatchIfTrue[Sequence[cst.RightParen]]]],
        AllOf[Union[Sequence[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType], AtLeastN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]], AtMostN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]]]], MatchIfTrue[Sequence[cst.RightParen]]]],
    ] = DoNotCare()
    # Whitespace between the callee and the opening paren / before the args.
    whitespace_after_func: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    whitespace_before_args: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    # Match against resolved metadata for the node.
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
# Any matcher form acceptable where a cst.BaseSuite is expected.
BaseSuiteMatchType = Union["BaseSuite", MetadataMatchType, MatchIfTrue[cst.BaseSuite]]
# Any matcher form acceptable where a cst.Decorator is expected.
DecoratorMatchType = Union["Decorator", MetadataMatchType, MatchIfTrue[cst.Decorator]]
# Any matcher form acceptable where a cst.EmptyLine is expected.
EmptyLineMatchType = Union["EmptyLine", MetadataMatchType, MatchIfTrue[cst.EmptyLine]]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class ClassDef(BaseCompoundStatement, BaseStatement, BaseMatcherNode):
    """Matcher for ``cst.ClassDef`` nodes."""

    # The class name.
    name: Union[NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType]] = DoNotCare()
    # The class body suite.
    body: Union[BaseSuiteMatchType, DoNotCareSentinel, OneOf[BaseSuiteMatchType], AllOf[BaseSuiteMatchType]] = DoNotCare()
    # Positional base classes; supports AtLeastN/AtMostN wildcards.
    bases: Union[
        Sequence[Union[ArgMatchType, DoNotCareSentinel, OneOf[ArgMatchType], AllOf[ArgMatchType], AtLeastN[Union[ArgMatchType, DoNotCareSentinel, OneOf[ArgMatchType], AllOf[ArgMatchType]]], AtMostN[Union[ArgMatchType, DoNotCareSentinel, OneOf[ArgMatchType], AllOf[ArgMatchType]]]]],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.Arg]],
        OneOf[Union[Sequence[Union[ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType], AtLeastN[Union[ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType]]], AtMostN[Union[ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType]]]]], MatchIfTrue[Sequence[cst.Arg]]]],
        AllOf[Union[Sequence[Union[ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType], AtLeastN[Union[ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType]]], AtMostN[Union[ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType]]]]], MatchIfTrue[Sequence[cst.Arg]]]],
    ] = DoNotCare()
    # Keyword arguments in the class header; same shape as bases.
    keywords: Union[
        Sequence[Union[ArgMatchType, DoNotCareSentinel, OneOf[ArgMatchType], AllOf[ArgMatchType], AtLeastN[Union[ArgMatchType, DoNotCareSentinel, OneOf[ArgMatchType], AllOf[ArgMatchType]]], AtMostN[Union[ArgMatchType, DoNotCareSentinel, OneOf[ArgMatchType], AllOf[ArgMatchType]]]]],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.Arg]],
        OneOf[Union[Sequence[Union[ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType], AtLeastN[Union[ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType]]], AtMostN[Union[ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType]]]]], MatchIfTrue[Sequence[cst.Arg]]]],
        AllOf[Union[Sequence[Union[ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType], AtLeastN[Union[ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType]]], AtMostN[Union[ArgMatchType, OneOf[ArgMatchType], AllOf[ArgMatchType]]]]], MatchIfTrue[Sequence[cst.Arg]]]],
    ] = DoNotCare()
    # Decorators applied to the class; supports AtLeastN/AtMostN wildcards.
    decorators: Union[
        Sequence[Union[DecoratorMatchType, DoNotCareSentinel, OneOf[DecoratorMatchType], AllOf[DecoratorMatchType], AtLeastN[Union[DecoratorMatchType, DoNotCareSentinel, OneOf[DecoratorMatchType], AllOf[DecoratorMatchType]]], AtMostN[Union[DecoratorMatchType, DoNotCareSentinel, OneOf[DecoratorMatchType], AllOf[DecoratorMatchType]]]]],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.Decorator]],
        OneOf[Union[Sequence[Union[DecoratorMatchType, OneOf[DecoratorMatchType], AllOf[DecoratorMatchType], AtLeastN[Union[DecoratorMatchType, OneOf[DecoratorMatchType], AllOf[DecoratorMatchType]]], AtMostN[Union[DecoratorMatchType, OneOf[DecoratorMatchType], AllOf[DecoratorMatchType]]]]], MatchIfTrue[Sequence[cst.Decorator]]]],
        AllOf[Union[Sequence[Union[DecoratorMatchType, OneOf[DecoratorMatchType], AllOf[DecoratorMatchType], AtLeastN[Union[DecoratorMatchType, OneOf[DecoratorMatchType], AllOf[DecoratorMatchType]]], AtMostN[Union[DecoratorMatchType, OneOf[DecoratorMatchType], AllOf[DecoratorMatchType]]]]], MatchIfTrue[Sequence[cst.Decorator]]]],
    ] = DoNotCare()
    # Optional single parens around the bases/keywords list (not a sequence here).
    lpar: Union[LeftParenMatchType, DoNotCareSentinel, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]] = DoNotCare()
    rpar: Union[RightParenMatchType, DoNotCareSentinel, OneOf[RightParenMatchType], AllOf[RightParenMatchType]] = DoNotCare()
    # Empty lines preceding the class (before any decorators).
    leading_lines: Union[
        Sequence[Union[EmptyLineMatchType, DoNotCareSentinel, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType], AtLeastN[Union[EmptyLineMatchType, DoNotCareSentinel, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType]]], AtMostN[Union[EmptyLineMatchType, DoNotCareSentinel, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType]]]]],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.EmptyLine]],
        OneOf[Union[Sequence[Union[EmptyLineMatchType, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType], AtLeastN[Union[EmptyLineMatchType, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType]]], AtMostN[Union[EmptyLineMatchType, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType]]]]], MatchIfTrue[Sequence[cst.EmptyLine]]]],
        AllOf[Union[Sequence[Union[EmptyLineMatchType, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType], AtLeastN[Union[EmptyLineMatchType, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType]]], AtMostN[Union[EmptyLineMatchType, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType]]]]], MatchIfTrue[Sequence[cst.EmptyLine]]]],
    ] = DoNotCare()
    # Empty lines between the decorators and the class keyword; same shape.
    lines_after_decorators: Union[
        Sequence[Union[EmptyLineMatchType, DoNotCareSentinel, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType], AtLeastN[Union[EmptyLineMatchType, DoNotCareSentinel, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType]]], AtMostN[Union[EmptyLineMatchType, DoNotCareSentinel, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType]]]]],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.EmptyLine]],
        OneOf[Union[Sequence[Union[EmptyLineMatchType, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType], AtLeastN[Union[EmptyLineMatchType, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType]]], AtMostN[Union[EmptyLineMatchType, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType]]]]], MatchIfTrue[Sequence[cst.EmptyLine]]]],
        AllOf[Union[Sequence[Union[EmptyLineMatchType, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType], AtLeastN[Union[EmptyLineMatchType, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType]]], AtMostN[Union[EmptyLineMatchType, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType]]]]], MatchIfTrue[Sequence[cst.EmptyLine]]]],
    ] = DoNotCare()
    # Whitespace around the class keyword, name, and before the colon.
    whitespace_after_class: Union[SimpleWhitespaceMatchType, DoNotCareSentinel, OneOf[SimpleWhitespaceMatchType], AllOf[SimpleWhitespaceMatchType]] = DoNotCare()
    whitespace_after_name: Union[SimpleWhitespaceMatchType, DoNotCareSentinel, OneOf[SimpleWhitespaceMatchType], AllOf[SimpleWhitespaceMatchType]] = DoNotCare()
    whitespace_before_colon: Union[SimpleWhitespaceMatchType, DoNotCareSentinel, OneOf[SimpleWhitespaceMatchType], AllOf[SimpleWhitespaceMatchType]] = DoNotCare()
    # Match against resolved metadata for the node.
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Colon(BaseMatcherNode):
    """Matcher for ``cst.Colon`` token nodes."""

    # Whitespace on either side of the colon; DoNotCare() matches any.
    whitespace_before: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    whitespace_after: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    # Match against resolved metadata for the node.
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Comma(BaseMatcherNode):
    """Matcher for ``cst.Comma`` token nodes."""

    # Whitespace on either side of the comma; DoNotCare() matches any.
    whitespace_before: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    whitespace_after: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    # Match against resolved metadata for the node.
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
# Any matcher form acceptable where a plain str value is expected.
strMatchType = Union[str, MetadataMatchType, MatchIfTrue[str]]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Comment(BaseMatcherNode):
    """Matcher for ``cst.Comment`` nodes."""

    # The comment's string value.
    value: Union[strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType]] = DoNotCare()
    # Match against resolved metadata for the node.
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
# Any matcher form acceptable where a cst.CompIf is expected.
CompIfMatchType = Union["CompIf", MetadataMatchType, MatchIfTrue[cst.CompIf]]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class CompFor(BaseMatcherNode):
    """Matcher for ``cst.CompFor`` (the ``for ... in ...`` clause of a comprehension)."""

    # Assignment target of the for clause.
    target: Union[BaseAssignTargetExpressionMatchType, DoNotCareSentinel, OneOf[BaseAssignTargetExpressionMatchType], AllOf[BaseAssignTargetExpressionMatchType]] = DoNotCare()
    # Iterable being looped over.
    iter: Union[BaseExpressionMatchType, DoNotCareSentinel, OneOf[BaseExpressionMatchType], AllOf[BaseExpressionMatchType]] = DoNotCare()
    # Attached `if` clauses; supports AtLeastN/AtMostN wildcards.
    ifs: Union[
        Sequence[Union[CompIfMatchType, DoNotCareSentinel, OneOf[CompIfMatchType], AllOf[CompIfMatchType], AtLeastN[Union[CompIfMatchType, DoNotCareSentinel, OneOf[CompIfMatchType], AllOf[CompIfMatchType]]], AtMostN[Union[CompIfMatchType, DoNotCareSentinel, OneOf[CompIfMatchType], AllOf[CompIfMatchType]]]]],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.CompIf]],
        OneOf[Union[Sequence[Union[CompIfMatchType, OneOf[CompIfMatchType], AllOf[CompIfMatchType], AtLeastN[Union[CompIfMatchType, OneOf[CompIfMatchType], AllOf[CompIfMatchType]]], AtMostN[Union[CompIfMatchType, OneOf[CompIfMatchType], AllOf[CompIfMatchType]]]]], MatchIfTrue[Sequence[cst.CompIf]]]],
        AllOf[Union[Sequence[Union[CompIfMatchType, OneOf[CompIfMatchType], AllOf[CompIfMatchType], AtLeastN[Union[CompIfMatchType, OneOf[CompIfMatchType], AllOf[CompIfMatchType]]], AtMostN[Union[CompIfMatchType, OneOf[CompIfMatchType], AllOf[CompIfMatchType]]]]], MatchIfTrue[Sequence[cst.CompIf]]]],
    ] = DoNotCare()
    # Optional nested for clause; the Optional form matches a node or None.
    inner_for_in: Union[
        Optional["CompFor"],
        MetadataMatchType,
        MatchIfTrue[Optional[cst.CompFor]],
        DoNotCareSentinel,
        OneOf[Union[Optional["CompFor"], MetadataMatchType, MatchIfTrue[Optional[cst.CompFor]]]],
        AllOf[Union[Optional["CompFor"], MetadataMatchType, MatchIfTrue[Optional[cst.CompFor]]]],
    ] = DoNotCare()
    # Optional `async` marker on the clause.
    asynchronous: Union[
        Optional["Asynchronous"],
        MetadataMatchType,
        MatchIfTrue[Optional[cst.Asynchronous]],
        DoNotCareSentinel,
        OneOf[Union[Optional["Asynchronous"], MetadataMatchType, MatchIfTrue[Optional[cst.Asynchronous]]]],
        AllOf[Union[Optional["Asynchronous"], MetadataMatchType, MatchIfTrue[Optional[cst.Asynchronous]]]],
    ] = DoNotCare()
    # Whitespace around the `for` and `in` keywords.
    whitespace_before: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    whitespace_after_for: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    whitespace_before_in: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    whitespace_after_in: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    # Match against resolved metadata for the node.
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class CompIf(BaseMatcherNode):
    """Matcher for ``cst.CompIf`` (the ``if`` clause of a comprehension)."""

    # The condition expression being tested.
    test: Union[BaseExpressionMatchType, DoNotCareSentinel, OneOf[BaseExpressionMatchType], AllOf[BaseExpressionMatchType]] = DoNotCare()
    # Whitespace before the `if` keyword and before the test expression.
    whitespace_before: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    whitespace_before_test: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    # Match against resolved metadata for the node.
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
# Any matcher form acceptable where a cst.ComparisonTarget is expected.
ComparisonTargetMatchType = Union[
    "ComparisonTarget", MetadataMatchType, MatchIfTrue[cst.ComparisonTarget]
]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Comparison(BaseExpression, BaseMatcherNode):
    """Matcher for ``cst.Comparison`` nodes (left operand plus comparison targets)."""

    # Left-most operand of the comparison chain.
    left: Union[BaseExpressionMatchType, DoNotCareSentinel, OneOf[BaseExpressionMatchType], AllOf[BaseExpressionMatchType]] = DoNotCare()
    # The (operator, comparator) pairs; supports AtLeastN/AtMostN wildcards.
    comparisons: Union[
        Sequence[Union[ComparisonTargetMatchType, DoNotCareSentinel, OneOf[ComparisonTargetMatchType], AllOf[ComparisonTargetMatchType], AtLeastN[Union[ComparisonTargetMatchType, DoNotCareSentinel, OneOf[ComparisonTargetMatchType], AllOf[ComparisonTargetMatchType]]], AtMostN[Union[ComparisonTargetMatchType, DoNotCareSentinel, OneOf[ComparisonTargetMatchType], AllOf[ComparisonTargetMatchType]]]]],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.ComparisonTarget]],
        OneOf[Union[Sequence[Union[ComparisonTargetMatchType, OneOf[ComparisonTargetMatchType], AllOf[ComparisonTargetMatchType], AtLeastN[Union[ComparisonTargetMatchType, OneOf[ComparisonTargetMatchType], AllOf[ComparisonTargetMatchType]]], AtMostN[Union[ComparisonTargetMatchType, OneOf[ComparisonTargetMatchType], AllOf[ComparisonTargetMatchType]]]]], MatchIfTrue[Sequence[cst.ComparisonTarget]]]],
        AllOf[Union[Sequence[Union[ComparisonTargetMatchType, OneOf[ComparisonTargetMatchType], AllOf[ComparisonTargetMatchType], AtLeastN[Union[ComparisonTargetMatchType, OneOf[ComparisonTargetMatchType], AllOf[ComparisonTargetMatchType]]], AtMostN[Union[ComparisonTargetMatchType, OneOf[ComparisonTargetMatchType], AllOf[ComparisonTargetMatchType]]]]], MatchIfTrue[Sequence[cst.ComparisonTarget]]]],
    ] = DoNotCare()
    # Sequence of wrapping left parentheses; supports AtLeastN/AtMostN wildcards.
    lpar: Union[
        Sequence[Union[LeftParenMatchType, DoNotCareSentinel, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType], AtLeastN[Union[LeftParenMatchType, DoNotCareSentinel, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]], AtMostN[Union[LeftParenMatchType, DoNotCareSentinel, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]]]],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.LeftParen]],
        OneOf[Union[Sequence[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType], AtLeastN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]], AtMostN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]]]], MatchIfTrue[Sequence[cst.LeftParen]]]],
        AllOf[Union[Sequence[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType], AtLeastN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]], AtMostN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]]]], MatchIfTrue[Sequence[cst.LeftParen]]]],
    ] = DoNotCare()
    # Sequence of wrapping right parentheses; mirrors lpar.
    rpar: Union[
        Sequence[Union[RightParenMatchType, DoNotCareSentinel, OneOf[RightParenMatchType], AllOf[RightParenMatchType], AtLeastN[Union[RightParenMatchType, DoNotCareSentinel, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]], AtMostN[Union[RightParenMatchType, DoNotCareSentinel, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]]]],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.RightParen]],
        OneOf[Union[Sequence[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType], AtLeastN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]], AtMostN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]]]], MatchIfTrue[Sequence[cst.RightParen]]]],
        AllOf[Union[Sequence[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType], AtLeastN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]], AtMostN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]]]], MatchIfTrue[Sequence[cst.RightParen]]]],
    ] = DoNotCare()
    # Match against resolved metadata for the node.
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
# Any matcher form acceptable where a cst.BaseCompOp is expected.
BaseCompOpMatchType = Union[
    "BaseCompOp", MetadataMatchType, MatchIfTrue[cst.BaseCompOp]
]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class ComparisonTarget(BaseMatcherNode):
    """Matcher for ``cst.ComparisonTarget`` (one operator/comparator pair)."""

    # The comparison operator.
    operator: Union[BaseCompOpMatchType, DoNotCareSentinel, OneOf[BaseCompOpMatchType], AllOf[BaseCompOpMatchType]] = DoNotCare()
    # The right-hand expression being compared against.
    comparator: Union[BaseExpressionMatchType, DoNotCareSentinel, OneOf[BaseExpressionMatchType], AllOf[BaseExpressionMatchType]] = DoNotCare()
    # Match against resolved metadata for the node.
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
# Any matcher form acceptable where a cst.SimpleString or cst.FormattedString is expected.
SimpleStringOrFormattedStringMatchType = Union[
    "SimpleString",
    "FormattedString",
    MetadataMatchType,
    MatchIfTrue[Union[cst.SimpleString, cst.FormattedString]],
]
# Same as above, but also accepting cst.ConcatenatedString.
SimpleStringOrFormattedStringOrConcatenatedStringMatchType = Union[
    "SimpleString",
    "FormattedString",
    "ConcatenatedString",
    MetadataMatchType,
    MatchIfTrue[Union[cst.SimpleString, cst.FormattedString, cst.ConcatenatedString]],
]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class ConcatenatedString(BaseExpression, BaseString, BaseMatcherNode):
    """Matcher for ``cst.ConcatenatedString`` nodes (adjacent string literals)."""

    # The left string piece (simple or formatted string).
    left: Union[SimpleStringOrFormattedStringMatchType, DoNotCareSentinel, OneOf[SimpleStringOrFormattedStringMatchType], AllOf[SimpleStringOrFormattedStringMatchType]] = DoNotCare()
    # The right piece, which may itself be another concatenation.
    right: Union[SimpleStringOrFormattedStringOrConcatenatedStringMatchType, DoNotCareSentinel, OneOf[SimpleStringOrFormattedStringOrConcatenatedStringMatchType], AllOf[SimpleStringOrFormattedStringOrConcatenatedStringMatchType]] = DoNotCare()
    # Sequence of wrapping left parentheses; supports AtLeastN/AtMostN wildcards.
    lpar: Union[
        Sequence[Union[LeftParenMatchType, DoNotCareSentinel, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType], AtLeastN[Union[LeftParenMatchType, DoNotCareSentinel, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]], AtMostN[Union[LeftParenMatchType, DoNotCareSentinel, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]]]],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.LeftParen]],
        OneOf[Union[Sequence[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType], AtLeastN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]], AtMostN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]]]], MatchIfTrue[Sequence[cst.LeftParen]]]],
        AllOf[Union[Sequence[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType], AtLeastN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]], AtMostN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]]]], MatchIfTrue[Sequence[cst.LeftParen]]]],
    ] = DoNotCare()
    # Sequence of wrapping right parentheses; mirrors lpar.
    rpar: Union[
        Sequence[Union[RightParenMatchType, DoNotCareSentinel, OneOf[RightParenMatchType], AllOf[RightParenMatchType], AtLeastN[Union[RightParenMatchType, DoNotCareSentinel, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]], AtMostN[Union[RightParenMatchType, DoNotCareSentinel, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]]]],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.RightParen]],
        OneOf[Union[Sequence[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType], AtLeastN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]], AtMostN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]]]], MatchIfTrue[Sequence[cst.RightParen]]]],
        AllOf[Union[Sequence[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType], AtLeastN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]], AtMostN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]]]], MatchIfTrue[Sequence[cst.RightParen]]]],
    ] = DoNotCare()
    # Whitespace between the two concatenated pieces.
    whitespace_between: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    # Match against resolved metadata for the node.
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Continue(BaseSmallStatement, BaseMatcherNode):
    """Matcher for ``cst.Continue`` statement nodes."""

    # Optional trailing semicolon on the statement.
    semicolon: Union[SemicolonMatchType, DoNotCareSentinel, OneOf[SemicolonMatchType], AllOf[SemicolonMatchType]] = DoNotCare()
    # Match against resolved metadata for the node.
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
# Values accepted when matching a field typed as Name | Attribute | Call:
# a matcher for any of the three node kinds, a metadata constraint, or a
# predicate over the concrete cst node.
NameOrAttributeOrCallMatchType = Union[
    "Name",
    "Attribute",
    "Call",
    MetadataMatchType,
    MatchIfTrue[Union[cst.Name, cst.Attribute, cst.Call]],
]
# Values accepted when matching a TrailingWhitespace field.
TrailingWhitespaceMatchType = Union[
    "TrailingWhitespace", MetadataMatchType, MatchIfTrue[cst.TrailingWhitespace]
]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Decorator(BaseMatcherNode):
    """Matcher for ``cst.Decorator``; every field defaults to ``DoNotCare()`` so only explicitly supplied fields constrain the match."""

    # The decorated expression after the ``@`` (a Name, Attribute or Call).
    decorator: Union[
        NameOrAttributeOrCallMatchType,
        DoNotCareSentinel,
        OneOf[NameOrAttributeOrCallMatchType],
        AllOf[NameOrAttributeOrCallMatchType],
    ] = DoNotCare()
    # Sequence matcher for the empty lines preceding the decorator; accepts
    # per-element matchers (including AtLeastN/AtMostN wildcards), a predicate
    # over the whole sequence, or OneOf/AllOf combinations of those forms.
    leading_lines: Union[
        Sequence[
            Union[
                EmptyLineMatchType,
                DoNotCareSentinel,
                OneOf[EmptyLineMatchType],
                AllOf[EmptyLineMatchType],
                AtLeastN[
                    Union[
                        EmptyLineMatchType,
                        DoNotCareSentinel,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        EmptyLineMatchType,
                        DoNotCareSentinel,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.EmptyLine]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        EmptyLineMatchType,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                        AtLeastN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.EmptyLine]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        EmptyLineMatchType,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                        AtLeastN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.EmptyLine]],
            ]
        ],
    ] = DoNotCare()
    # Whitespace between the ``@`` and the decorator expression.
    whitespace_after_at: Union[
        SimpleWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[SimpleWhitespaceMatchType],
        AllOf[SimpleWhitespaceMatchType],
    ] = DoNotCare()
    # Whitespace and newline following the decorator expression.
    trailing_whitespace: Union[
        TrailingWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[TrailingWhitespaceMatchType],
        AllOf[TrailingWhitespaceMatchType],
    ] = DoNotCare()
    # Metadata-based constraints on the matched node.
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
# Values accepted when matching a BaseDelTargetExpression field.
BaseDelTargetExpressionMatchType = Union[
    "BaseDelTargetExpression",
    MetadataMatchType,
    MatchIfTrue[cst.BaseDelTargetExpression],
]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Del(BaseSmallStatement, BaseMatcherNode):
    """Matcher for ``cst.Del``; every field defaults to ``DoNotCare()`` so only explicitly supplied fields constrain the match."""

    # The expression being deleted.
    target: Union[
        BaseDelTargetExpressionMatchType,
        DoNotCareSentinel,
        OneOf[BaseDelTargetExpressionMatchType],
        AllOf[BaseDelTargetExpressionMatchType],
    ] = DoNotCare()
    # Whitespace between the ``del`` keyword and the target.
    whitespace_after_del: Union[
        SimpleWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[SimpleWhitespaceMatchType],
        AllOf[SimpleWhitespaceMatchType],
    ] = DoNotCare()
    # Optional trailing semicolon of the statement.
    semicolon: Union[
        SemicolonMatchType,
        DoNotCareSentinel,
        OneOf[SemicolonMatchType],
        AllOf[SemicolonMatchType],
    ] = DoNotCare()
    # Metadata-based constraints on the matched node.
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
# Values accepted when matching a BaseDictElement field.
BaseDictElementMatchType = Union[
    "BaseDictElement", MetadataMatchType, MatchIfTrue[cst.BaseDictElement]
]
# Values accepted when matching a LeftCurlyBrace field.
LeftCurlyBraceMatchType = Union[
    "LeftCurlyBrace", MetadataMatchType, MatchIfTrue[cst.LeftCurlyBrace]
]
# Values accepted when matching a RightCurlyBrace field.
RightCurlyBraceMatchType = Union[
    "RightCurlyBrace", MetadataMatchType, MatchIfTrue[cst.RightCurlyBrace]
]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Dict(BaseDict, BaseExpression, BaseMatcherNode):
    """Matcher for ``cst.Dict``; every field defaults to ``DoNotCare()`` so only explicitly supplied fields constrain the match."""

    # Sequence matcher for the dict's elements; accepts per-element matchers
    # (including AtLeastN/AtMostN wildcards), a predicate over the whole
    # sequence, or OneOf/AllOf combinations of those forms.
    elements: Union[
        Sequence[
            Union[
                BaseDictElementMatchType,
                DoNotCareSentinel,
                OneOf[BaseDictElementMatchType],
                AllOf[BaseDictElementMatchType],
                AtLeastN[
                    Union[
                        BaseDictElementMatchType,
                        DoNotCareSentinel,
                        OneOf[BaseDictElementMatchType],
                        AllOf[BaseDictElementMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        BaseDictElementMatchType,
                        DoNotCareSentinel,
                        OneOf[BaseDictElementMatchType],
                        AllOf[BaseDictElementMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.BaseDictElement]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        BaseDictElementMatchType,
                        OneOf[BaseDictElementMatchType],
                        AllOf[BaseDictElementMatchType],
                        AtLeastN[
                            Union[
                                BaseDictElementMatchType,
                                OneOf[BaseDictElementMatchType],
                                AllOf[BaseDictElementMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                BaseDictElementMatchType,
                                OneOf[BaseDictElementMatchType],
                                AllOf[BaseDictElementMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.BaseDictElement]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        BaseDictElementMatchType,
                        OneOf[BaseDictElementMatchType],
                        AllOf[BaseDictElementMatchType],
                        AtLeastN[
                            Union[
                                BaseDictElementMatchType,
                                OneOf[BaseDictElementMatchType],
                                AllOf[BaseDictElementMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                BaseDictElementMatchType,
                                OneOf[BaseDictElementMatchType],
                                AllOf[BaseDictElementMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.BaseDictElement]],
            ]
        ],
    ] = DoNotCare()
    # Opening ``{`` brace.
    lbrace: Union[
        LeftCurlyBraceMatchType,
        DoNotCareSentinel,
        OneOf[LeftCurlyBraceMatchType],
        AllOf[LeftCurlyBraceMatchType],
    ] = DoNotCare()
    # Closing ``}`` brace.
    rbrace: Union[
        RightCurlyBraceMatchType,
        DoNotCareSentinel,
        OneOf[RightCurlyBraceMatchType],
        AllOf[RightCurlyBraceMatchType],
    ] = DoNotCare()
    # Sequence matcher for surrounding left parentheses (same sequence forms
    # as ``elements``).
    lpar: Union[
        Sequence[
            Union[
                LeftParenMatchType,
                DoNotCareSentinel,
                OneOf[LeftParenMatchType],
                AllOf[LeftParenMatchType],
                AtLeastN[
                    Union[
                        LeftParenMatchType,
                        DoNotCareSentinel,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        LeftParenMatchType,
                        DoNotCareSentinel,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.LeftParen]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        LeftParenMatchType,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                        AtLeastN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.LeftParen]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        LeftParenMatchType,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                        AtLeastN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.LeftParen]],
            ]
        ],
    ] = DoNotCare()
    # Sequence matcher for surrounding right parentheses.
    rpar: Union[
        Sequence[
            Union[
                RightParenMatchType,
                DoNotCareSentinel,
                OneOf[RightParenMatchType],
                AllOf[RightParenMatchType],
                AtLeastN[
                    Union[
                        RightParenMatchType,
                        DoNotCareSentinel,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        RightParenMatchType,
                        DoNotCareSentinel,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.RightParen]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        RightParenMatchType,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                        AtLeastN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.RightParen]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        RightParenMatchType,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                        AtLeastN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.RightParen]],
            ]
        ],
    ] = DoNotCare()
    # Metadata-based constraints on the matched node.
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
# Values accepted when matching a CompFor (comprehension ``for`` clause) field.
CompForMatchType = Union["CompFor", MetadataMatchType, MatchIfTrue[cst.CompFor]]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class DictComp(BaseComp, BaseDict, BaseExpression, BaseMatcherNode):
    """Matcher for ``cst.DictComp``; every field defaults to ``DoNotCare()`` so only explicitly supplied fields constrain the match."""

    # Key expression of the comprehension.
    key: Union[
        BaseAssignTargetExpressionMatchType,
        DoNotCareSentinel,
        OneOf[BaseAssignTargetExpressionMatchType],
        AllOf[BaseAssignTargetExpressionMatchType],
    ] = DoNotCare()
    # Value expression of the comprehension.
    value: Union[
        BaseAssignTargetExpressionMatchType,
        DoNotCareSentinel,
        OneOf[BaseAssignTargetExpressionMatchType],
        AllOf[BaseAssignTargetExpressionMatchType],
    ] = DoNotCare()
    # The ``for ... in ...`` clause.
    for_in: Union[
        CompForMatchType,
        DoNotCareSentinel,
        OneOf[CompForMatchType],
        AllOf[CompForMatchType],
    ] = DoNotCare()
    # Opening ``{`` brace.
    lbrace: Union[
        LeftCurlyBraceMatchType,
        DoNotCareSentinel,
        OneOf[LeftCurlyBraceMatchType],
        AllOf[LeftCurlyBraceMatchType],
    ] = DoNotCare()
    # Closing ``}`` brace.
    rbrace: Union[
        RightCurlyBraceMatchType,
        DoNotCareSentinel,
        OneOf[RightCurlyBraceMatchType],
        AllOf[RightCurlyBraceMatchType],
    ] = DoNotCare()
    # Sequence matcher for surrounding left parentheses; accepts per-element
    # matchers (including AtLeastN/AtMostN wildcards), a predicate over the
    # whole sequence, or OneOf/AllOf combinations of those forms.
    lpar: Union[
        Sequence[
            Union[
                LeftParenMatchType,
                DoNotCareSentinel,
                OneOf[LeftParenMatchType],
                AllOf[LeftParenMatchType],
                AtLeastN[
                    Union[
                        LeftParenMatchType,
                        DoNotCareSentinel,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        LeftParenMatchType,
                        DoNotCareSentinel,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.LeftParen]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        LeftParenMatchType,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                        AtLeastN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.LeftParen]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        LeftParenMatchType,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                        AtLeastN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.LeftParen]],
            ]
        ],
    ] = DoNotCare()
    # Sequence matcher for surrounding right parentheses.
    rpar: Union[
        Sequence[
            Union[
                RightParenMatchType,
                DoNotCareSentinel,
                OneOf[RightParenMatchType],
                AllOf[RightParenMatchType],
                AtLeastN[
                    Union[
                        RightParenMatchType,
                        DoNotCareSentinel,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        RightParenMatchType,
                        DoNotCareSentinel,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.RightParen]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        RightParenMatchType,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                        AtLeastN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.RightParen]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        RightParenMatchType,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                        AtLeastN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.RightParen]],
            ]
        ],
    ] = DoNotCare()
    # Whitespace before the ``:`` separating key and value.
    whitespace_before_colon: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    # Whitespace after the ``:`` separating key and value.
    whitespace_after_colon: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    # Metadata-based constraints on the matched node.
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class DictElement(BaseDictElement, BaseMatcherNode):
    """Matcher for ``cst.DictElement``; every field defaults to ``DoNotCare()`` so only explicitly supplied fields constrain the match."""

    # Key expression of the ``key: value`` pair.
    key: Union[
        BaseExpressionMatchType,
        DoNotCareSentinel,
        OneOf[BaseExpressionMatchType],
        AllOf[BaseExpressionMatchType],
    ] = DoNotCare()
    # Value expression of the ``key: value`` pair.
    value: Union[
        BaseExpressionMatchType,
        DoNotCareSentinel,
        OneOf[BaseExpressionMatchType],
        AllOf[BaseExpressionMatchType],
    ] = DoNotCare()
    # Optional trailing comma.
    comma: Union[
        CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType]
    ] = DoNotCare()
    # Whitespace before the ``:``.
    whitespace_before_colon: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    # Whitespace after the ``:``.
    whitespace_after_colon: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    # Metadata-based constraints on the matched node.
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Divide(BaseBinaryOp, BaseMatcherNode):
    """Matcher for ``cst.Divide``; every field defaults to ``DoNotCare()`` so only explicitly supplied fields constrain the match."""

    # Whitespace before the operator token.
    whitespace_before: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    # Whitespace after the operator token.
    whitespace_after: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    # Metadata-based constraints on the matched node.
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class DivideAssign(BaseAugOp, BaseMatcherNode):
    """Matcher for ``cst.DivideAssign``; every field defaults to ``DoNotCare()`` so only explicitly supplied fields constrain the match."""

    # Whitespace before the operator token.
    whitespace_before: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    # Whitespace after the operator token.
    whitespace_after: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    # Metadata-based constraints on the matched node.
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Dot(BaseMatcherNode):
    """Matcher for ``cst.Dot``; every field defaults to ``DoNotCare()`` so only explicitly supplied fields constrain the match."""

    # Whitespace before the ``.`` token.
    whitespace_before: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    # Whitespace after the ``.`` token.
    whitespace_after: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    # Metadata-based constraints on the matched node.
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Element(BaseElement, BaseMatcherNode):
    """Matcher for ``cst.Element``; every field defaults to ``DoNotCare()`` so only explicitly supplied fields constrain the match."""

    # The element's expression.
    value: Union[
        BaseExpressionMatchType,
        DoNotCareSentinel,
        OneOf[BaseExpressionMatchType],
        AllOf[BaseExpressionMatchType],
    ] = DoNotCare()
    # Optional trailing comma.
    comma: Union[
        CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType]
    ] = DoNotCare()
    # Metadata-based constraints on the matched node.
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Ellipsis(BaseExpression, BaseMatcherNode):
    """Matcher for ``cst.Ellipsis``; every field defaults to ``DoNotCare()`` so only explicitly supplied fields constrain the match.

    NOTE: the class name intentionally mirrors the cst node and shadows the
    ``Ellipsis`` builtin within this module.
    """

    # Sequence matcher for surrounding left parentheses; accepts per-element
    # matchers (including AtLeastN/AtMostN wildcards), a predicate over the
    # whole sequence, or OneOf/AllOf combinations of those forms.
    lpar: Union[
        Sequence[
            Union[
                LeftParenMatchType,
                DoNotCareSentinel,
                OneOf[LeftParenMatchType],
                AllOf[LeftParenMatchType],
                AtLeastN[
                    Union[
                        LeftParenMatchType,
                        DoNotCareSentinel,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        LeftParenMatchType,
                        DoNotCareSentinel,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.LeftParen]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        LeftParenMatchType,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                        AtLeastN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.LeftParen]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        LeftParenMatchType,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                        AtLeastN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.LeftParen]],
            ]
        ],
    ] = DoNotCare()
    # Sequence matcher for surrounding right parentheses.
    rpar: Union[
        Sequence[
            Union[
                RightParenMatchType,
                DoNotCareSentinel,
                OneOf[RightParenMatchType],
                AllOf[RightParenMatchType],
                AtLeastN[
                    Union[
                        RightParenMatchType,
                        DoNotCareSentinel,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        RightParenMatchType,
                        DoNotCareSentinel,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.RightParen]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        RightParenMatchType,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                        AtLeastN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.RightParen]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        RightParenMatchType,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                        AtLeastN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.RightParen]],
            ]
        ],
    ] = DoNotCare()
    # Metadata-based constraints on the matched node.
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Else(BaseMatcherNode):
    """Matcher for ``cst.Else``; every field defaults to ``DoNotCare()`` so only explicitly supplied fields constrain the match."""

    # The suite of statements under the ``else:`` clause.
    body: Union[
        BaseSuiteMatchType,
        DoNotCareSentinel,
        OneOf[BaseSuiteMatchType],
        AllOf[BaseSuiteMatchType],
    ] = DoNotCare()
    # Sequence matcher for the empty lines preceding the clause; accepts
    # per-element matchers (including AtLeastN/AtMostN wildcards), a predicate
    # over the whole sequence, or OneOf/AllOf combinations of those forms.
    leading_lines: Union[
        Sequence[
            Union[
                EmptyLineMatchType,
                DoNotCareSentinel,
                OneOf[EmptyLineMatchType],
                AllOf[EmptyLineMatchType],
                AtLeastN[
                    Union[
                        EmptyLineMatchType,
                        DoNotCareSentinel,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        EmptyLineMatchType,
                        DoNotCareSentinel,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.EmptyLine]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        EmptyLineMatchType,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                        AtLeastN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.EmptyLine]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        EmptyLineMatchType,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                        AtLeastN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.EmptyLine]],
            ]
        ],
    ] = DoNotCare()
    # Whitespace between the ``else`` keyword and the ``:``.
    whitespace_before_colon: Union[
        SimpleWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[SimpleWhitespaceMatchType],
        AllOf[SimpleWhitespaceMatchType],
    ] = DoNotCare()
    # Metadata-based constraints on the matched node.
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
# Values accepted when matching a plain bool field.
boolMatchType = Union[bool, MetadataMatchType, MatchIfTrue[bool]]
# Values accepted when matching a Newline field.
NewlineMatchType = Union["Newline", MetadataMatchType, MatchIfTrue[cst.Newline]]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class EmptyLine(BaseMatcherNode):
    """Matcher for ``cst.EmptyLine``; every field defaults to ``DoNotCare()`` so only explicitly supplied fields constrain the match."""

    # Whether the empty line carries the block's indentation.
    indent: Union[
        boolMatchType, DoNotCareSentinel, OneOf[boolMatchType], AllOf[boolMatchType]
    ] = DoNotCare()
    # Whitespace on the line before any comment/newline.
    whitespace: Union[
        SimpleWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[SimpleWhitespaceMatchType],
        AllOf[SimpleWhitespaceMatchType],
    ] = DoNotCare()
    # Optional comment on the line; ``None`` matches the absence of a comment.
    comment: Union[
        Optional["Comment"],
        MetadataMatchType,
        MatchIfTrue[Optional[cst.Comment]],
        DoNotCareSentinel,
        OneOf[
            Union[
                Optional["Comment"],
                MetadataMatchType,
                MatchIfTrue[Optional[cst.Comment]],
            ]
        ],
        AllOf[
            Union[
                Optional["Comment"],
                MetadataMatchType,
                MatchIfTrue[Optional[cst.Comment]],
            ]
        ],
    ] = DoNotCare()
    # The newline terminating the line.
    newline: Union[
        NewlineMatchType,
        DoNotCareSentinel,
        OneOf[NewlineMatchType],
        AllOf[NewlineMatchType],
    ] = DoNotCare()
    # Metadata-based constraints on the matched node.
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Equal(BaseCompOp, BaseMatcherNode):
    """Matcher for ``cst.Equal``; every field defaults to ``DoNotCare()`` so only explicitly supplied fields constrain the match."""

    # Whitespace before the operator token.
    whitespace_before: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    # Whitespace after the operator token.
    whitespace_after: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    # Metadata-based constraints on the matched node.
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class ExceptHandler(BaseMatcherNode):
    """Matcher for ``cst.ExceptHandler``; every field defaults to ``DoNotCare()`` so only explicitly supplied fields constrain the match."""

    # The suite of statements under the handler.
    body: Union[
        BaseSuiteMatchType,
        DoNotCareSentinel,
        OneOf[BaseSuiteMatchType],
        AllOf[BaseSuiteMatchType],
    ] = DoNotCare()
    # Optional exception-type expression; ``None`` matches a bare ``except:``.
    type: Union[
        Optional["BaseExpression"],
        MetadataMatchType,
        MatchIfTrue[Optional[cst.BaseExpression]],
        DoNotCareSentinel,
        OneOf[
            Union[
                Optional["BaseExpression"],
                MetadataMatchType,
                MatchIfTrue[Optional[cst.BaseExpression]],
            ]
        ],
        AllOf[
            Union[
                Optional["BaseExpression"],
                MetadataMatchType,
                MatchIfTrue[Optional[cst.BaseExpression]],
            ]
        ],
    ] = DoNotCare()
    # Optional ``as name`` binding; ``None`` matches its absence.
    name: Union[
        Optional["AsName"],
        MetadataMatchType,
        MatchIfTrue[Optional[cst.AsName]],
        DoNotCareSentinel,
        OneOf[
            Union[
                Optional["AsName"], MetadataMatchType, MatchIfTrue[Optional[cst.AsName]]
            ]
        ],
        AllOf[
            Union[
                Optional["AsName"], MetadataMatchType, MatchIfTrue[Optional[cst.AsName]]
            ]
        ],
    ] = DoNotCare()
    # Sequence matcher for the empty lines preceding the handler; accepts
    # per-element matchers (including AtLeastN/AtMostN wildcards), a predicate
    # over the whole sequence, or OneOf/AllOf combinations of those forms.
    leading_lines: Union[
        Sequence[
            Union[
                EmptyLineMatchType,
                DoNotCareSentinel,
                OneOf[EmptyLineMatchType],
                AllOf[EmptyLineMatchType],
                AtLeastN[
                    Union[
                        EmptyLineMatchType,
                        DoNotCareSentinel,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        EmptyLineMatchType,
                        DoNotCareSentinel,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.EmptyLine]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        EmptyLineMatchType,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                        AtLeastN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.EmptyLine]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        EmptyLineMatchType,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                        AtLeastN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.EmptyLine]],
            ]
        ],
    ] = DoNotCare()
    # Whitespace after the ``except`` keyword.
    whitespace_after_except: Union[
        SimpleWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[SimpleWhitespaceMatchType],
        AllOf[SimpleWhitespaceMatchType],
    ] = DoNotCare()
    # Whitespace before the ``:``.
    whitespace_before_colon: Union[
        SimpleWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[SimpleWhitespaceMatchType],
        AllOf[SimpleWhitespaceMatchType],
    ] = DoNotCare()
    # Metadata-based constraints on the matched node.
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class ExceptStarHandler(BaseMatcherNode):
    """Matcher for ``cst.ExceptStarHandler`` (``except* ...``); every field defaults to ``DoNotCare()`` so only explicitly supplied fields constrain the match."""

    # The suite of statements under the handler.
    body: Union[
        BaseSuiteMatchType,
        DoNotCareSentinel,
        OneOf[BaseSuiteMatchType],
        AllOf[BaseSuiteMatchType],
    ] = DoNotCare()
    # Exception-type expression (required for ``except*``, hence non-Optional).
    type: Union[
        BaseExpressionMatchType,
        DoNotCareSentinel,
        OneOf[BaseExpressionMatchType],
        AllOf[BaseExpressionMatchType],
    ] = DoNotCare()
    # Optional ``as name`` binding; ``None`` matches its absence.
    name: Union[
        Optional["AsName"],
        MetadataMatchType,
        MatchIfTrue[Optional[cst.AsName]],
        DoNotCareSentinel,
        OneOf[
            Union[
                Optional["AsName"], MetadataMatchType, MatchIfTrue[Optional[cst.AsName]]
            ]
        ],
        AllOf[
            Union[
                Optional["AsName"], MetadataMatchType, MatchIfTrue[Optional[cst.AsName]]
            ]
        ],
    ] = DoNotCare()
    # Sequence matcher for the empty lines preceding the handler; accepts
    # per-element matchers (including AtLeastN/AtMostN wildcards), a predicate
    # over the whole sequence, or OneOf/AllOf combinations of those forms.
    leading_lines: Union[
        Sequence[
            Union[
                EmptyLineMatchType,
                DoNotCareSentinel,
                OneOf[EmptyLineMatchType],
                AllOf[EmptyLineMatchType],
                AtLeastN[
                    Union[
                        EmptyLineMatchType,
                        DoNotCareSentinel,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        EmptyLineMatchType,
                        DoNotCareSentinel,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.EmptyLine]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        EmptyLineMatchType,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                        AtLeastN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.EmptyLine]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        EmptyLineMatchType,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                        AtLeastN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.EmptyLine]],
            ]
        ],
    ] = DoNotCare()
    # Whitespace after the ``except`` keyword.
    whitespace_after_except: Union[
        SimpleWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[SimpleWhitespaceMatchType],
        AllOf[SimpleWhitespaceMatchType],
    ] = DoNotCare()
    # Whitespace after the ``*``.
    whitespace_after_star: Union[
        SimpleWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[SimpleWhitespaceMatchType],
        AllOf[SimpleWhitespaceMatchType],
    ] = DoNotCare()
    # Whitespace before the ``:``.
    whitespace_before_colon: Union[
        SimpleWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[SimpleWhitespaceMatchType],
        AllOf[SimpleWhitespaceMatchType],
    ] = DoNotCare()
    # Metadata-based constraints on the matched node.
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Expr(BaseSmallStatement, BaseMatcherNode):
    """Matcher for ``cst.Expr``; every field defaults to ``DoNotCare()`` so only explicitly supplied fields constrain the match."""

    # The wrapped expression.
    value: Union[
        BaseExpressionMatchType,
        DoNotCareSentinel,
        OneOf[BaseExpressionMatchType],
        AllOf[BaseExpressionMatchType],
    ] = DoNotCare()
    # Optional trailing semicolon of the statement.
    semicolon: Union[
        SemicolonMatchType,
        DoNotCareSentinel,
        OneOf[SemicolonMatchType],
        AllOf[SemicolonMatchType],
    ] = DoNotCare()
    # Metadata-based constraints on the matched node.
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Finally(BaseMatcherNode):
    """Matcher for ``cst.Finally``; every field defaults to ``DoNotCare()`` so only explicitly supplied fields constrain the match."""

    # The suite of statements under the ``finally:`` clause.
    body: Union[
        BaseSuiteMatchType,
        DoNotCareSentinel,
        OneOf[BaseSuiteMatchType],
        AllOf[BaseSuiteMatchType],
    ] = DoNotCare()
    # Sequence matcher for the empty lines preceding the clause; accepts
    # per-element matchers (including AtLeastN/AtMostN wildcards), a predicate
    # over the whole sequence, or OneOf/AllOf combinations of those forms.
    leading_lines: Union[
        Sequence[
            Union[
                EmptyLineMatchType,
                DoNotCareSentinel,
                OneOf[EmptyLineMatchType],
                AllOf[EmptyLineMatchType],
                AtLeastN[
                    Union[
                        EmptyLineMatchType,
                        DoNotCareSentinel,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        EmptyLineMatchType,
                        DoNotCareSentinel,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.EmptyLine]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        EmptyLineMatchType,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                        AtLeastN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.EmptyLine]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        EmptyLineMatchType,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                        AtLeastN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.EmptyLine]],
            ]
        ],
    ] = DoNotCare()
    # Whitespace between the ``finally`` keyword and the ``:``.
    whitespace_before_colon: Union[
        SimpleWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[SimpleWhitespaceMatchType],
        AllOf[SimpleWhitespaceMatchType],
    ] = DoNotCare()
    # Metadata-based constraints on the matched node.
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Float(BaseExpression, BaseNumber, BaseMatcherNode):
    """Matcher for ``cst.Float``; every field defaults to ``DoNotCare()`` so only explicitly supplied fields constrain the match."""

    # Source text of the literal (a string, not a parsed float).
    value: Union[
        strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType]
    ] = DoNotCare()
    # Sequence matcher for surrounding left parentheses; accepts per-element
    # matchers (including AtLeastN/AtMostN wildcards), a predicate over the
    # whole sequence, or OneOf/AllOf combinations of those forms.
    lpar: Union[
        Sequence[
            Union[
                LeftParenMatchType,
                DoNotCareSentinel,
                OneOf[LeftParenMatchType],
                AllOf[LeftParenMatchType],
                AtLeastN[
                    Union[
                        LeftParenMatchType,
                        DoNotCareSentinel,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        LeftParenMatchType,
                        DoNotCareSentinel,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.LeftParen]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        LeftParenMatchType,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                        AtLeastN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.LeftParen]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        LeftParenMatchType,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                        AtLeastN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.LeftParen]],
            ]
        ],
    ] = DoNotCare()
    # Sequence matcher for surrounding right parentheses.
    rpar: Union[
        Sequence[
            Union[
                RightParenMatchType,
                DoNotCareSentinel,
                OneOf[RightParenMatchType],
                AllOf[RightParenMatchType],
                AtLeastN[
                    Union[
                        RightParenMatchType,
                        DoNotCareSentinel,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        RightParenMatchType,
                        DoNotCareSentinel,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.RightParen]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        RightParenMatchType,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                        AtLeastN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.RightParen]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        RightParenMatchType,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                        AtLeastN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.RightParen]],
            ]
        ],
    ] = DoNotCare()
    # Metadata-based constraints on the matched node.
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class FloorDivide(BaseBinaryOp, BaseMatcherNode):
    """Matcher for ``cst.FloorDivide``; every field defaults to ``DoNotCare()`` so only explicitly supplied fields constrain the match."""

    # Whitespace before the operator token.
    whitespace_before: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    # Whitespace after the operator token.
    whitespace_after: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    # Metadata-based constraints on the matched node.
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class FloorDivideAssign(BaseAugOp, BaseMatcherNode):
    """Matcher for ``cst.FloorDivideAssign``; every field defaults to ``DoNotCare()`` so only explicitly supplied fields constrain the match."""

    # Whitespace before the operator token.
    whitespace_before: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    # Whitespace after the operator token.
    whitespace_after: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    # Metadata-based constraints on the matched node.
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class For(BaseCompoundStatement, BaseStatement, BaseMatcherNode):
    """Matcher for ``For`` loop statement nodes.

    All fields default to ``DoNotCare()`` (match anything). Sequence-valued
    fields additionally accept ``AtLeastN``/``AtMostN`` wildcards and
    ``MatchIfTrue`` predicates over the whole sequence.
    """

    # Loop target (the name(s) bound on each iteration).
    target: Union[BaseAssignTargetExpressionMatchType, DoNotCareSentinel, OneOf[BaseAssignTargetExpressionMatchType], AllOf[BaseAssignTargetExpressionMatchType]] = DoNotCare()
    # Iterable expression being looped over.
    iter: Union[BaseExpressionMatchType, DoNotCareSentinel, OneOf[BaseExpressionMatchType], AllOf[BaseExpressionMatchType]] = DoNotCare()
    body: Union[BaseSuiteMatchType, DoNotCareSentinel, OneOf[BaseSuiteMatchType], AllOf[BaseSuiteMatchType]] = DoNotCare()
    # Optional trailing ``else`` clause.
    orelse: Union[
        Optional["Else"],
        MetadataMatchType,
        MatchIfTrue[Optional[cst.Else]],
        DoNotCareSentinel,
        OneOf[Union[Optional["Else"], MetadataMatchType, MatchIfTrue[Optional[cst.Else]]]],
        AllOf[Union[Optional["Else"], MetadataMatchType, MatchIfTrue[Optional[cst.Else]]]],
    ] = DoNotCare()
    # Optional ``async`` marker.
    asynchronous: Union[
        Optional["Asynchronous"],
        MetadataMatchType,
        MatchIfTrue[Optional[cst.Asynchronous]],
        DoNotCareSentinel,
        OneOf[Union[Optional["Asynchronous"], MetadataMatchType, MatchIfTrue[Optional[cst.Asynchronous]]]],
        AllOf[Union[Optional["Asynchronous"], MetadataMatchType, MatchIfTrue[Optional[cst.Asynchronous]]]],
    ] = DoNotCare()
    # Empty lines preceding the statement.
    leading_lines: Union[
        Sequence[Union[EmptyLineMatchType, DoNotCareSentinel, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType], AtLeastN[Union[EmptyLineMatchType, DoNotCareSentinel, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType]]], AtMostN[Union[EmptyLineMatchType, DoNotCareSentinel, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType]]]]],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.EmptyLine]],
        OneOf[Union[Sequence[Union[EmptyLineMatchType, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType], AtLeastN[Union[EmptyLineMatchType, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType]]], AtMostN[Union[EmptyLineMatchType, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType]]]]], MatchIfTrue[Sequence[cst.EmptyLine]]]],
        AllOf[Union[Sequence[Union[EmptyLineMatchType, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType], AtLeastN[Union[EmptyLineMatchType, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType]]], AtMostN[Union[EmptyLineMatchType, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType]]]]], MatchIfTrue[Sequence[cst.EmptyLine]]]],
    ] = DoNotCare()
    # Whitespace around the ``for``/``in`` keywords and before the colon.
    whitespace_after_for: Union[SimpleWhitespaceMatchType, DoNotCareSentinel, OneOf[SimpleWhitespaceMatchType], AllOf[SimpleWhitespaceMatchType]] = DoNotCare()
    whitespace_before_in: Union[SimpleWhitespaceMatchType, DoNotCareSentinel, OneOf[SimpleWhitespaceMatchType], AllOf[SimpleWhitespaceMatchType]] = DoNotCare()
    whitespace_after_in: Union[SimpleWhitespaceMatchType, DoNotCareSentinel, OneOf[SimpleWhitespaceMatchType], AllOf[SimpleWhitespaceMatchType]] = DoNotCare()
    whitespace_before_colon: Union[SimpleWhitespaceMatchType, DoNotCareSentinel, OneOf[SimpleWhitespaceMatchType], AllOf[SimpleWhitespaceMatchType]] = DoNotCare()
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
# Shorthand: a concrete BaseFormattedStringContent matcher, a metadata match,
# or an arbitrary predicate over cst.BaseFormattedStringContent.
BaseFormattedStringContentMatchType = Union[
    "BaseFormattedStringContent",
    MetadataMatchType,
    MatchIfTrue[cst.BaseFormattedStringContent],
]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class FormattedString(BaseExpression, BaseString, BaseMatcherNode):
    """Matcher for ``FormattedString`` (f-string) nodes.

    All fields default to ``DoNotCare()`` (match anything). Sequence-valued
    fields additionally accept ``AtLeastN``/``AtMostN`` wildcards and
    ``MatchIfTrue`` predicates over the whole sequence.
    """

    # Interleaved text/expression parts of the f-string.
    parts: Union[
        Sequence[Union[BaseFormattedStringContentMatchType, DoNotCareSentinel, OneOf[BaseFormattedStringContentMatchType], AllOf[BaseFormattedStringContentMatchType], AtLeastN[Union[BaseFormattedStringContentMatchType, DoNotCareSentinel, OneOf[BaseFormattedStringContentMatchType], AllOf[BaseFormattedStringContentMatchType]]], AtMostN[Union[BaseFormattedStringContentMatchType, DoNotCareSentinel, OneOf[BaseFormattedStringContentMatchType], AllOf[BaseFormattedStringContentMatchType]]]]],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.BaseFormattedStringContent]],
        OneOf[Union[Sequence[Union[BaseFormattedStringContentMatchType, OneOf[BaseFormattedStringContentMatchType], AllOf[BaseFormattedStringContentMatchType], AtLeastN[Union[BaseFormattedStringContentMatchType, OneOf[BaseFormattedStringContentMatchType], AllOf[BaseFormattedStringContentMatchType]]], AtMostN[Union[BaseFormattedStringContentMatchType, OneOf[BaseFormattedStringContentMatchType], AllOf[BaseFormattedStringContentMatchType]]]]], MatchIfTrue[Sequence[cst.BaseFormattedStringContent]]]],
        AllOf[Union[Sequence[Union[BaseFormattedStringContentMatchType, OneOf[BaseFormattedStringContentMatchType], AllOf[BaseFormattedStringContentMatchType], AtLeastN[Union[BaseFormattedStringContentMatchType, OneOf[BaseFormattedStringContentMatchType], AllOf[BaseFormattedStringContentMatchType]]], AtMostN[Union[BaseFormattedStringContentMatchType, OneOf[BaseFormattedStringContentMatchType], AllOf[BaseFormattedStringContentMatchType]]]]], MatchIfTrue[Sequence[cst.BaseFormattedStringContent]]]],
    ] = DoNotCare()
    # Opening prefix+quote of the literal.
    start: Union[strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType]] = DoNotCare()
    # Closing quote: one of the four valid quote styles.
    end: Union[
        Literal['"', "'", '"""', "'''"],
        MetadataMatchType,
        MatchIfTrue[Literal['"', "'", '"""', "'''"]],
        DoNotCareSentinel,
        OneOf[Union[Literal['"', "'", '"""', "'''"], MetadataMatchType, MatchIfTrue[Literal['"', "'", '"""', "'''"]]]],
        AllOf[Union[Literal['"', "'", '"""', "'''"], MetadataMatchType, MatchIfTrue[Literal['"', "'", '"""', "'''"]]]],
    ] = DoNotCare()
    # Surrounding parentheses.
    lpar: Union[
        Sequence[Union[LeftParenMatchType, DoNotCareSentinel, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType], AtLeastN[Union[LeftParenMatchType, DoNotCareSentinel, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]], AtMostN[Union[LeftParenMatchType, DoNotCareSentinel, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]]]],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.LeftParen]],
        OneOf[Union[Sequence[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType], AtLeastN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]], AtMostN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]]]], MatchIfTrue[Sequence[cst.LeftParen]]]],
        AllOf[Union[Sequence[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType], AtLeastN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]], AtMostN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]]]], MatchIfTrue[Sequence[cst.LeftParen]]]],
    ] = DoNotCare()
    rpar: Union[
        Sequence[Union[RightParenMatchType, DoNotCareSentinel, OneOf[RightParenMatchType], AllOf[RightParenMatchType], AtLeastN[Union[RightParenMatchType, DoNotCareSentinel, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]], AtMostN[Union[RightParenMatchType, DoNotCareSentinel, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]]]],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.RightParen]],
        OneOf[Union[Sequence[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType], AtLeastN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]], AtMostN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]]]], MatchIfTrue[Sequence[cst.RightParen]]]],
        AllOf[Union[Sequence[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType], AtLeastN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]], AtMostN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]]]], MatchIfTrue[Sequence[cst.RightParen]]]],
    ] = DoNotCare()
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class FormattedStringExpression(BaseFormattedStringContent, BaseMatcherNode):
    """Matcher for ``FormattedStringExpression`` (``{...}`` inside f-strings).

    All fields default to ``DoNotCare()`` (match anything).
    """

    # The interpolated expression itself.
    expression: Union[BaseExpressionMatchType, DoNotCareSentinel, OneOf[BaseExpressionMatchType], AllOf[BaseExpressionMatchType]] = DoNotCare()
    # Optional conversion flag string (e.g. after ``!``).
    conversion: Union[
        Optional[str],
        MetadataMatchType,
        MatchIfTrue[Optional[str]],
        DoNotCareSentinel,
        OneOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]],
        AllOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]],
    ] = DoNotCare()
    # Optional format-spec parts (after ``:``).
    format_spec: Union[
        Optional[Sequence["BaseFormattedStringContent"]],
        MetadataMatchType,
        MatchIfTrue[Optional[Sequence[cst.BaseFormattedStringContent]]],
        DoNotCareSentinel,
        OneOf[Union[Optional[Sequence["BaseFormattedStringContent"]], MetadataMatchType, MatchIfTrue[Optional[Sequence[cst.BaseFormattedStringContent]]]]],
        AllOf[Union[Optional[Sequence["BaseFormattedStringContent"]], MetadataMatchType, MatchIfTrue[Optional[Sequence[cst.BaseFormattedStringContent]]]]],
    ] = DoNotCare()
    # Whitespace around the interpolated expression.
    whitespace_before_expression: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    whitespace_after_expression: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    # Optional ``=`` self-documenting marker.
    equal: Union[
        Optional["AssignEqual"],
        MetadataMatchType,
        MatchIfTrue[Optional[cst.AssignEqual]],
        DoNotCareSentinel,
        OneOf[Union[Optional["AssignEqual"], MetadataMatchType, MatchIfTrue[Optional[cst.AssignEqual]]]],
        AllOf[Union[Optional["AssignEqual"], MetadataMatchType, MatchIfTrue[Optional[cst.AssignEqual]]]],
    ] = DoNotCare()
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class FormattedStringText(BaseFormattedStringContent, BaseMatcherNode):
    """Matcher for ``FormattedStringText`` (literal text inside f-strings).

    All fields default to ``DoNotCare()`` (match anything).
    """

    # Raw literal text content.
    value: Union[strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType]] = DoNotCare()
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class From(BaseMatcherNode):
    """Matcher for ``From`` clause nodes.

    All fields default to ``DoNotCare()`` (match anything).
    """

    # Expression following the ``from`` keyword.
    item: Union[BaseExpressionMatchType, DoNotCareSentinel, OneOf[BaseExpressionMatchType], AllOf[BaseExpressionMatchType]] = DoNotCare()
    # Whitespace around the ``from`` keyword.
    whitespace_before_from: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    whitespace_after_from: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
# Shorthand: a concrete Parameters matcher, a metadata match, or an arbitrary
# predicate over cst.Parameters.
ParametersMatchType = Union[
    "Parameters", MetadataMatchType, MatchIfTrue[cst.Parameters]
]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class FunctionDef(BaseCompoundStatement, BaseStatement, BaseMatcherNode):
    """Matcher for ``FunctionDef`` statement nodes.

    All fields default to ``DoNotCare()`` (match anything). Sequence-valued
    fields additionally accept ``AtLeastN``/``AtMostN`` wildcards and
    ``MatchIfTrue`` predicates over the whole sequence.
    """

    # Function name.
    name: Union[NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType]] = DoNotCare()
    # Parameter list.
    params: Union[ParametersMatchType, DoNotCareSentinel, OneOf[ParametersMatchType], AllOf[ParametersMatchType]] = DoNotCare()
    body: Union[BaseSuiteMatchType, DoNotCareSentinel, OneOf[BaseSuiteMatchType], AllOf[BaseSuiteMatchType]] = DoNotCare()
    # Decorator list applied above the ``def``.
    decorators: Union[
        Sequence[Union[DecoratorMatchType, DoNotCareSentinel, OneOf[DecoratorMatchType], AllOf[DecoratorMatchType], AtLeastN[Union[DecoratorMatchType, DoNotCareSentinel, OneOf[DecoratorMatchType], AllOf[DecoratorMatchType]]], AtMostN[Union[DecoratorMatchType, DoNotCareSentinel, OneOf[DecoratorMatchType], AllOf[DecoratorMatchType]]]]],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.Decorator]],
        OneOf[Union[Sequence[Union[DecoratorMatchType, OneOf[DecoratorMatchType], AllOf[DecoratorMatchType], AtLeastN[Union[DecoratorMatchType, OneOf[DecoratorMatchType], AllOf[DecoratorMatchType]]], AtMostN[Union[DecoratorMatchType, OneOf[DecoratorMatchType], AllOf[DecoratorMatchType]]]]], MatchIfTrue[Sequence[cst.Decorator]]]],
        AllOf[Union[Sequence[Union[DecoratorMatchType, OneOf[DecoratorMatchType], AllOf[DecoratorMatchType], AtLeastN[Union[DecoratorMatchType, OneOf[DecoratorMatchType], AllOf[DecoratorMatchType]]], AtMostN[Union[DecoratorMatchType, OneOf[DecoratorMatchType], AllOf[DecoratorMatchType]]]]], MatchIfTrue[Sequence[cst.Decorator]]]],
    ] = DoNotCare()
    # Optional return annotation.
    returns: Union[
        Optional["Annotation"],
        MetadataMatchType,
        MatchIfTrue[Optional[cst.Annotation]],
        DoNotCareSentinel,
        OneOf[Union[Optional["Annotation"], MetadataMatchType, MatchIfTrue[Optional[cst.Annotation]]]],
        AllOf[Union[Optional["Annotation"], MetadataMatchType, MatchIfTrue[Optional[cst.Annotation]]]],
    ] = DoNotCare()
    # Optional ``async`` marker.
    asynchronous: Union[
        Optional["Asynchronous"],
        MetadataMatchType,
        MatchIfTrue[Optional[cst.Asynchronous]],
        DoNotCareSentinel,
        OneOf[Union[Optional["Asynchronous"], MetadataMatchType, MatchIfTrue[Optional[cst.Asynchronous]]]],
        AllOf[Union[Optional["Asynchronous"], MetadataMatchType, MatchIfTrue[Optional[cst.Asynchronous]]]],
    ] = DoNotCare()
    # Empty lines before the decorators.
    leading_lines: Union[
        Sequence[Union[EmptyLineMatchType, DoNotCareSentinel, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType], AtLeastN[Union[EmptyLineMatchType, DoNotCareSentinel, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType]]], AtMostN[Union[EmptyLineMatchType, DoNotCareSentinel, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType]]]]],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.EmptyLine]],
        OneOf[Union[Sequence[Union[EmptyLineMatchType, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType], AtLeastN[Union[EmptyLineMatchType, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType]]], AtMostN[Union[EmptyLineMatchType, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType]]]]], MatchIfTrue[Sequence[cst.EmptyLine]]]],
        AllOf[Union[Sequence[Union[EmptyLineMatchType, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType], AtLeastN[Union[EmptyLineMatchType, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType]]], AtMostN[Union[EmptyLineMatchType, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType]]]]], MatchIfTrue[Sequence[cst.EmptyLine]]]],
    ] = DoNotCare()
    # Empty lines between the decorators and the ``def``.
    lines_after_decorators: Union[
        Sequence[Union[EmptyLineMatchType, DoNotCareSentinel, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType], AtLeastN[Union[EmptyLineMatchType, DoNotCareSentinel, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType]]], AtMostN[Union[EmptyLineMatchType, DoNotCareSentinel, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType]]]]],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.EmptyLine]],
        OneOf[Union[Sequence[Union[EmptyLineMatchType, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType], AtLeastN[Union[EmptyLineMatchType, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType]]], AtMostN[Union[EmptyLineMatchType, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType]]]]], MatchIfTrue[Sequence[cst.EmptyLine]]]],
        AllOf[Union[Sequence[Union[EmptyLineMatchType, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType], AtLeastN[Union[EmptyLineMatchType, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType]]], AtMostN[Union[EmptyLineMatchType, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType]]]]], MatchIfTrue[Sequence[cst.EmptyLine]]]],
    ] = DoNotCare()
    # Whitespace around the header tokens.
    whitespace_after_def: Union[SimpleWhitespaceMatchType, DoNotCareSentinel, OneOf[SimpleWhitespaceMatchType], AllOf[SimpleWhitespaceMatchType]] = DoNotCare()
    whitespace_after_name: Union[SimpleWhitespaceMatchType, DoNotCareSentinel, OneOf[SimpleWhitespaceMatchType], AllOf[SimpleWhitespaceMatchType]] = DoNotCare()
    whitespace_before_params: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    whitespace_before_colon: Union[SimpleWhitespaceMatchType, DoNotCareSentinel, OneOf[SimpleWhitespaceMatchType], AllOf[SimpleWhitespaceMatchType]] = DoNotCare()
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class GeneratorExp(BaseComp, BaseExpression, BaseSimpleComp, BaseMatcherNode):
    """Matcher for ``GeneratorExp`` (generator expression) nodes.

    All fields default to ``DoNotCare()`` (match anything). Sequence-valued
    fields additionally accept ``AtLeastN``/``AtMostN`` wildcards and
    ``MatchIfTrue`` predicates over the whole sequence.
    """

    # Element expression produced by the generator.
    elt: Union[BaseAssignTargetExpressionMatchType, DoNotCareSentinel, OneOf[BaseAssignTargetExpressionMatchType], AllOf[BaseAssignTargetExpressionMatchType]] = DoNotCare()
    # The ``for ... in ...`` clause.
    for_in: Union[CompForMatchType, DoNotCareSentinel, OneOf[CompForMatchType], AllOf[CompForMatchType]] = DoNotCare()
    # Surrounding parentheses.
    lpar: Union[
        Sequence[Union[LeftParenMatchType, DoNotCareSentinel, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType], AtLeastN[Union[LeftParenMatchType, DoNotCareSentinel, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]], AtMostN[Union[LeftParenMatchType, DoNotCareSentinel, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]]]],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.LeftParen]],
        OneOf[Union[Sequence[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType], AtLeastN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]], AtMostN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]]]], MatchIfTrue[Sequence[cst.LeftParen]]]],
        AllOf[Union[Sequence[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType], AtLeastN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]], AtMostN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]]]], MatchIfTrue[Sequence[cst.LeftParen]]]],
    ] = DoNotCare()
    rpar: Union[
        Sequence[Union[RightParenMatchType, DoNotCareSentinel, OneOf[RightParenMatchType], AllOf[RightParenMatchType], AtLeastN[Union[RightParenMatchType, DoNotCareSentinel, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]], AtMostN[Union[RightParenMatchType, DoNotCareSentinel, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]]]],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.RightParen]],
        OneOf[Union[Sequence[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType], AtLeastN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]], AtMostN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]]]], MatchIfTrue[Sequence[cst.RightParen]]]],
        AllOf[Union[Sequence[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType], AtLeastN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]], AtMostN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]]]], MatchIfTrue[Sequence[cst.RightParen]]]],
    ] = DoNotCare()
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
# Shorthand: a concrete NameItem matcher, a metadata match, or an arbitrary
# predicate over cst.NameItem.
NameItemMatchType = Union["NameItem", MetadataMatchType, MatchIfTrue[cst.NameItem]]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Global(BaseSmallStatement, BaseMatcherNode):
    """Matcher for ``Global`` statement nodes.

    All fields default to ``DoNotCare()`` (match anything). Sequence-valued
    fields additionally accept ``AtLeastN``/``AtMostN`` wildcards and
    ``MatchIfTrue`` predicates over the whole sequence.
    """

    # Names declared global.
    names: Union[
        Sequence[Union[NameItemMatchType, DoNotCareSentinel, OneOf[NameItemMatchType], AllOf[NameItemMatchType], AtLeastN[Union[NameItemMatchType, DoNotCareSentinel, OneOf[NameItemMatchType], AllOf[NameItemMatchType]]], AtMostN[Union[NameItemMatchType, DoNotCareSentinel, OneOf[NameItemMatchType], AllOf[NameItemMatchType]]]]],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.NameItem]],
        OneOf[Union[Sequence[Union[NameItemMatchType, OneOf[NameItemMatchType], AllOf[NameItemMatchType], AtLeastN[Union[NameItemMatchType, OneOf[NameItemMatchType], AllOf[NameItemMatchType]]], AtMostN[Union[NameItemMatchType, OneOf[NameItemMatchType], AllOf[NameItemMatchType]]]]], MatchIfTrue[Sequence[cst.NameItem]]]],
        AllOf[Union[Sequence[Union[NameItemMatchType, OneOf[NameItemMatchType], AllOf[NameItemMatchType], AtLeastN[Union[NameItemMatchType, OneOf[NameItemMatchType], AllOf[NameItemMatchType]]], AtMostN[Union[NameItemMatchType, OneOf[NameItemMatchType], AllOf[NameItemMatchType]]]]], MatchIfTrue[Sequence[cst.NameItem]]]],
    ] = DoNotCare()
    # Whitespace after the ``global`` keyword.
    whitespace_after_global: Union[SimpleWhitespaceMatchType, DoNotCareSentinel, OneOf[SimpleWhitespaceMatchType], AllOf[SimpleWhitespaceMatchType]] = DoNotCare()
    # Optional trailing semicolon.
    semicolon: Union[SemicolonMatchType, DoNotCareSentinel, OneOf[SemicolonMatchType], AllOf[SemicolonMatchType]] = DoNotCare()
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class GreaterThan(BaseCompOp, BaseMatcherNode):
    """Matcher for ``GreaterThan`` comparison operator nodes.

    All fields default to ``DoNotCare()`` (match anything).
    """

    # Whitespace surrounding the operator token.
    whitespace_before: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    whitespace_after: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class GreaterThanEqual(BaseCompOp, BaseMatcherNode):
    """Matcher for ``GreaterThanEqual`` comparison operator nodes.

    All fields default to ``DoNotCare()`` (match anything).
    """

    # Whitespace surrounding the operator token.
    whitespace_before: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    whitespace_after: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
# Shorthand: a concrete If/Else matcher, None, a metadata match, or an
# arbitrary predicate over Union[cst.If, cst.Else, None].
IfOrElseOrNoneMatchType = Union[
    "If", "Else", None, MetadataMatchType, MatchIfTrue[Union[cst.If, cst.Else, None]]
]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class If(BaseCompoundStatement, BaseStatement, BaseMatcherNode):
    """Matcher for ``If`` statement nodes.

    All fields default to ``DoNotCare()`` (match anything). Sequence-valued
    fields additionally accept ``AtLeastN``/``AtMostN`` wildcards and
    ``MatchIfTrue`` predicates over the whole sequence.
    """

    # Condition expression.
    test: Union[BaseExpressionMatchType, DoNotCareSentinel, OneOf[BaseExpressionMatchType], AllOf[BaseExpressionMatchType]] = DoNotCare()
    body: Union[BaseSuiteMatchType, DoNotCareSentinel, OneOf[BaseSuiteMatchType], AllOf[BaseSuiteMatchType]] = DoNotCare()
    # Chained ``elif`` (If), ``else`` (Else), or nothing (None).
    orelse: Union[IfOrElseOrNoneMatchType, DoNotCareSentinel, OneOf[IfOrElseOrNoneMatchType], AllOf[IfOrElseOrNoneMatchType]] = DoNotCare()
    # Empty lines preceding the statement.
    leading_lines: Union[
        Sequence[Union[EmptyLineMatchType, DoNotCareSentinel, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType], AtLeastN[Union[EmptyLineMatchType, DoNotCareSentinel, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType]]], AtMostN[Union[EmptyLineMatchType, DoNotCareSentinel, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType]]]]],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.EmptyLine]],
        OneOf[Union[Sequence[Union[EmptyLineMatchType, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType], AtLeastN[Union[EmptyLineMatchType, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType]]], AtMostN[Union[EmptyLineMatchType, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType]]]]], MatchIfTrue[Sequence[cst.EmptyLine]]]],
        AllOf[Union[Sequence[Union[EmptyLineMatchType, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType], AtLeastN[Union[EmptyLineMatchType, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType]]], AtMostN[Union[EmptyLineMatchType, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType]]]]], MatchIfTrue[Sequence[cst.EmptyLine]]]],
    ] = DoNotCare()
    # Whitespace around the condition.
    whitespace_before_test: Union[SimpleWhitespaceMatchType, DoNotCareSentinel, OneOf[SimpleWhitespaceMatchType], AllOf[SimpleWhitespaceMatchType]] = DoNotCare()
    whitespace_after_test: Union[SimpleWhitespaceMatchType, DoNotCareSentinel, OneOf[SimpleWhitespaceMatchType], AllOf[SimpleWhitespaceMatchType]] = DoNotCare()
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class IfExp(BaseExpression, BaseMatcherNode):
    """Matcher for ``IfExp`` (ternary ``a if cond else b``) expression nodes.

    All fields default to ``DoNotCare()`` (match anything). Sequence-valued
    fields additionally accept ``AtLeastN``/``AtMostN`` wildcards and
    ``MatchIfTrue`` predicates over the whole sequence.
    """

    # Condition, true-branch, and false-branch expressions.
    test: Union[BaseExpressionMatchType, DoNotCareSentinel, OneOf[BaseExpressionMatchType], AllOf[BaseExpressionMatchType]] = DoNotCare()
    body: Union[BaseExpressionMatchType, DoNotCareSentinel, OneOf[BaseExpressionMatchType], AllOf[BaseExpressionMatchType]] = DoNotCare()
    orelse: Union[BaseExpressionMatchType, DoNotCareSentinel, OneOf[BaseExpressionMatchType], AllOf[BaseExpressionMatchType]] = DoNotCare()
    # Surrounding parentheses.
    lpar: Union[
        Sequence[Union[LeftParenMatchType, DoNotCareSentinel, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType], AtLeastN[Union[LeftParenMatchType, DoNotCareSentinel, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]], AtMostN[Union[LeftParenMatchType, DoNotCareSentinel, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]]]],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.LeftParen]],
        OneOf[Union[Sequence[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType], AtLeastN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]], AtMostN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]]]], MatchIfTrue[Sequence[cst.LeftParen]]]],
        AllOf[Union[Sequence[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType], AtLeastN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]], AtMostN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]]]], MatchIfTrue[Sequence[cst.LeftParen]]]],
    ] = DoNotCare()
    rpar: Union[
        Sequence[Union[RightParenMatchType, DoNotCareSentinel, OneOf[RightParenMatchType], AllOf[RightParenMatchType], AtLeastN[Union[RightParenMatchType, DoNotCareSentinel, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]], AtMostN[Union[RightParenMatchType, DoNotCareSentinel, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]]]],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.RightParen]],
        OneOf[Union[Sequence[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType], AtLeastN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]], AtMostN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]]]], MatchIfTrue[Sequence[cst.RightParen]]]],
        AllOf[Union[Sequence[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType], AtLeastN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]], AtMostN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]]]], MatchIfTrue[Sequence[cst.RightParen]]]],
    ] = DoNotCare()
    # Whitespace around the ``if`` and ``else`` keywords.
    whitespace_before_if: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    whitespace_after_if: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    whitespace_before_else: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    whitespace_after_else: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Imaginary(BaseExpression, BaseNumber, BaseMatcherNode):
    """Matcher for ``Imaginary`` number literal nodes.

    All fields default to ``DoNotCare()`` (match anything). Sequence-valued
    fields additionally accept ``AtLeastN``/``AtMostN`` wildcards and
    ``MatchIfTrue`` predicates over the whole sequence.
    """

    # Source text of the literal.
    value: Union[strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType]] = DoNotCare()
    # Surrounding parentheses.
    lpar: Union[
        Sequence[Union[LeftParenMatchType, DoNotCareSentinel, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType], AtLeastN[Union[LeftParenMatchType, DoNotCareSentinel, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]], AtMostN[Union[LeftParenMatchType, DoNotCareSentinel, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]]]],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.LeftParen]],
        OneOf[Union[Sequence[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType], AtLeastN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]], AtMostN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]]]], MatchIfTrue[Sequence[cst.LeftParen]]]],
        AllOf[Union[Sequence[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType], AtLeastN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]], AtMostN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]]]], MatchIfTrue[Sequence[cst.LeftParen]]]],
    ] = DoNotCare()
    rpar: Union[
        Sequence[Union[RightParenMatchType, DoNotCareSentinel, OneOf[RightParenMatchType], AllOf[RightParenMatchType], AtLeastN[Union[RightParenMatchType, DoNotCareSentinel, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]], AtMostN[Union[RightParenMatchType, DoNotCareSentinel, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]]]],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.RightParen]],
        OneOf[Union[Sequence[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType], AtLeastN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]], AtMostN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]]]], MatchIfTrue[Sequence[cst.RightParen]]]],
        AllOf[Union[Sequence[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType], AtLeastN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]], AtMostN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]]]], MatchIfTrue[Sequence[cst.RightParen]]]],
    ] = DoNotCare()
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
# Shorthand: a concrete ImportAlias matcher, a metadata match, or an arbitrary
# predicate over cst.ImportAlias.
ImportAliasMatchType = Union[
    "ImportAlias", MetadataMatchType, MatchIfTrue[cst.ImportAlias]
]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Import(BaseSmallStatement, BaseMatcherNode):
    """Matcher for ``Import`` statement nodes.

    All fields default to ``DoNotCare()`` (match anything). Sequence-valued
    fields additionally accept ``AtLeastN``/``AtMostN`` wildcards and
    ``MatchIfTrue`` predicates over the whole sequence.
    """

    # Imported names (aliases).
    names: Union[
        Sequence[Union[ImportAliasMatchType, DoNotCareSentinel, OneOf[ImportAliasMatchType], AllOf[ImportAliasMatchType], AtLeastN[Union[ImportAliasMatchType, DoNotCareSentinel, OneOf[ImportAliasMatchType], AllOf[ImportAliasMatchType]]], AtMostN[Union[ImportAliasMatchType, DoNotCareSentinel, OneOf[ImportAliasMatchType], AllOf[ImportAliasMatchType]]]]],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.ImportAlias]],
        OneOf[Union[Sequence[Union[ImportAliasMatchType, OneOf[ImportAliasMatchType], AllOf[ImportAliasMatchType], AtLeastN[Union[ImportAliasMatchType, OneOf[ImportAliasMatchType], AllOf[ImportAliasMatchType]]], AtMostN[Union[ImportAliasMatchType, OneOf[ImportAliasMatchType], AllOf[ImportAliasMatchType]]]]], MatchIfTrue[Sequence[cst.ImportAlias]]]],
        AllOf[Union[Sequence[Union[ImportAliasMatchType, OneOf[ImportAliasMatchType], AllOf[ImportAliasMatchType], AtLeastN[Union[ImportAliasMatchType, OneOf[ImportAliasMatchType], AllOf[ImportAliasMatchType]]], AtMostN[Union[ImportAliasMatchType, OneOf[ImportAliasMatchType], AllOf[ImportAliasMatchType]]]]], MatchIfTrue[Sequence[cst.ImportAlias]]]],
    ] = DoNotCare()
    # Optional trailing semicolon.
    semicolon: Union[SemicolonMatchType, DoNotCareSentinel, OneOf[SemicolonMatchType], AllOf[SemicolonMatchType]] = DoNotCare()
    # Whitespace after the ``import`` keyword.
    whitespace_after_import: Union[SimpleWhitespaceMatchType, DoNotCareSentinel, OneOf[SimpleWhitespaceMatchType], AllOf[SimpleWhitespaceMatchType]] = DoNotCare()
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
# Shorthand: a concrete Attribute/Name matcher, a metadata match, or an
# arbitrary predicate over Union[cst.Attribute, cst.Name].
AttributeOrNameMatchType = Union[
    "Attribute", "Name", MetadataMatchType, MatchIfTrue[Union[cst.Attribute, cst.Name]]
]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class ImportAlias(BaseMatcherNode):
    """Matcher counterpart of ``cst.ImportAlias``; fields default to ``DoNotCare()``."""

    name: Union[AttributeOrNameMatchType, DoNotCareSentinel, OneOf[AttributeOrNameMatchType], AllOf[AttributeOrNameMatchType]] = DoNotCare()
    # ``asname`` is optional on the concrete node, hence the Optional wrappers.
    asname: Union[
        Optional["AsName"],
        MetadataMatchType,
        MatchIfTrue[Optional[cst.AsName]],
        DoNotCareSentinel,
        OneOf[Union[Optional["AsName"], MetadataMatchType, MatchIfTrue[Optional[cst.AsName]]]],
        AllOf[Union[Optional["AsName"], MetadataMatchType, MatchIfTrue[Optional[cst.AsName]]]],
    ] = DoNotCare()
    comma: Union[CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType]] = DoNotCare()
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
# Like AttributeOrNameMatchType, but also admitting ``None`` (used where the
# concrete field itself may be absent, e.g. a relative import's module).
AttributeOrNameOrNoneMatchType = Union["Attribute", "Name", None, MetadataMatchType, MatchIfTrue[Union[cst.Attribute, cst.Name, None]]]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class ImportFrom(BaseSmallStatement, BaseMatcherNode):
    """Matcher counterpart of ``cst.ImportFrom``.

    Every field defaults to ``DoNotCare()``; supply a concrete matcher, a
    ``MatchIfTrue`` predicate, or a ``OneOf``/``AllOf`` combinator to
    constrain the corresponding field of the node being matched.
    """

    module: Union[AttributeOrNameOrNoneMatchType, DoNotCareSentinel, OneOf[AttributeOrNameOrNoneMatchType], AllOf[AttributeOrNameOrNoneMatchType]] = DoNotCare()
    # ``names`` is either a sequence of alias matchers (with AtLeastN/AtMostN
    # wildcard support) or an ``ImportStar`` matcher, each optionally wrapped
    # in OneOf/AllOf/MatchIfTrue.
    names: Union[
        Union[
            Sequence[Union[ImportAliasMatchType, DoNotCareSentinel, OneOf[ImportAliasMatchType], AllOf[ImportAliasMatchType], AtLeastN[Union[ImportAliasMatchType, DoNotCareSentinel, OneOf[ImportAliasMatchType], AllOf[ImportAliasMatchType]]], AtMostN[Union[ImportAliasMatchType, DoNotCareSentinel, OneOf[ImportAliasMatchType], AllOf[ImportAliasMatchType]]]]],
            DoNotCareSentinel,
            MatchIfTrue[Sequence[cst.ImportAlias]],
            OneOf[Union[Sequence[Union[ImportAliasMatchType, OneOf[ImportAliasMatchType], AllOf[ImportAliasMatchType], AtLeastN[Union[ImportAliasMatchType, OneOf[ImportAliasMatchType], AllOf[ImportAliasMatchType]]], AtMostN[Union[ImportAliasMatchType, OneOf[ImportAliasMatchType], AllOf[ImportAliasMatchType]]]]], MatchIfTrue[Sequence[cst.ImportAlias]]]],
            AllOf[Union[Sequence[Union[ImportAliasMatchType, OneOf[ImportAliasMatchType], AllOf[ImportAliasMatchType], AtLeastN[Union[ImportAliasMatchType, OneOf[ImportAliasMatchType], AllOf[ImportAliasMatchType]]], AtMostN[Union[ImportAliasMatchType, OneOf[ImportAliasMatchType], AllOf[ImportAliasMatchType]]]]], MatchIfTrue[Sequence[cst.ImportAlias]]]],
        ],
        "ImportStar",
        MetadataMatchType,
        MatchIfTrue[Union[Sequence[cst.ImportAlias], cst.ImportStar, OneOf[Union[Sequence[cst.ImportAlias], cst.ImportStar]], AllOf[Union[Sequence[cst.ImportAlias], cst.ImportStar]]]],
        DoNotCareSentinel,
        OneOf[
            Union[
                Union[
                    Sequence[Union[ImportAliasMatchType, OneOf[ImportAliasMatchType], AllOf[ImportAliasMatchType], AtLeastN[Union[ImportAliasMatchType, OneOf[ImportAliasMatchType], AllOf[ImportAliasMatchType]]], AtMostN[Union[ImportAliasMatchType, OneOf[ImportAliasMatchType], AllOf[ImportAliasMatchType]]]]],
                    MatchIfTrue[Sequence[cst.ImportAlias]],
                    OneOf[Union[Sequence[Union[ImportAliasMatchType, OneOf[ImportAliasMatchType], AllOf[ImportAliasMatchType], AtLeastN[Union[ImportAliasMatchType, OneOf[ImportAliasMatchType], AllOf[ImportAliasMatchType]]], AtMostN[Union[ImportAliasMatchType, OneOf[ImportAliasMatchType], AllOf[ImportAliasMatchType]]]]], MatchIfTrue[Sequence[cst.ImportAlias]]]],
                    AllOf[Union[Sequence[Union[ImportAliasMatchType, OneOf[ImportAliasMatchType], AllOf[ImportAliasMatchType], AtLeastN[Union[ImportAliasMatchType, OneOf[ImportAliasMatchType], AllOf[ImportAliasMatchType]]], AtMostN[Union[ImportAliasMatchType, OneOf[ImportAliasMatchType], AllOf[ImportAliasMatchType]]]]], MatchIfTrue[Sequence[cst.ImportAlias]]]],
                ],
                "ImportStar",
                MetadataMatchType,
                MatchIfTrue[Union[Sequence[cst.ImportAlias], cst.ImportStar, OneOf[Union[Sequence[cst.ImportAlias], cst.ImportStar]], AllOf[Union[Sequence[cst.ImportAlias], cst.ImportStar]]]],
            ]
        ],
        AllOf[
            Union[
                Union[
                    Sequence[Union[ImportAliasMatchType, OneOf[ImportAliasMatchType], AllOf[ImportAliasMatchType], AtLeastN[Union[ImportAliasMatchType, OneOf[ImportAliasMatchType], AllOf[ImportAliasMatchType]]], AtMostN[Union[ImportAliasMatchType, OneOf[ImportAliasMatchType], AllOf[ImportAliasMatchType]]]]],
                    MatchIfTrue[Sequence[cst.ImportAlias]],
                    OneOf[Union[Sequence[Union[ImportAliasMatchType, OneOf[ImportAliasMatchType], AllOf[ImportAliasMatchType], AtLeastN[Union[ImportAliasMatchType, OneOf[ImportAliasMatchType], AllOf[ImportAliasMatchType]]], AtMostN[Union[ImportAliasMatchType, OneOf[ImportAliasMatchType], AllOf[ImportAliasMatchType]]]]], MatchIfTrue[Sequence[cst.ImportAlias]]]],
                    AllOf[Union[Sequence[Union[ImportAliasMatchType, OneOf[ImportAliasMatchType], AllOf[ImportAliasMatchType], AtLeastN[Union[ImportAliasMatchType, OneOf[ImportAliasMatchType], AllOf[ImportAliasMatchType]]], AtMostN[Union[ImportAliasMatchType, OneOf[ImportAliasMatchType], AllOf[ImportAliasMatchType]]]]], MatchIfTrue[Sequence[cst.ImportAlias]]]],
                ],
                "ImportStar",
                MetadataMatchType,
                MatchIfTrue[Union[Sequence[cst.ImportAlias], cst.ImportStar, OneOf[Union[Sequence[cst.ImportAlias], cst.ImportStar]], AllOf[Union[Sequence[cst.ImportAlias], cst.ImportStar]]]],
            ]
        ],
    ] = DoNotCare()
    # Leading dots of a relative import.
    relative: Union[
        Sequence[Union[DotMatchType, DoNotCareSentinel, OneOf[DotMatchType], AllOf[DotMatchType], AtLeastN[Union[DotMatchType, DoNotCareSentinel, OneOf[DotMatchType], AllOf[DotMatchType]]], AtMostN[Union[DotMatchType, DoNotCareSentinel, OneOf[DotMatchType], AllOf[DotMatchType]]]]],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.Dot]],
        OneOf[Union[Sequence[Union[DotMatchType, OneOf[DotMatchType], AllOf[DotMatchType], AtLeastN[Union[DotMatchType, OneOf[DotMatchType], AllOf[DotMatchType]]], AtMostN[Union[DotMatchType, OneOf[DotMatchType], AllOf[DotMatchType]]]]], MatchIfTrue[Sequence[cst.Dot]]]],
        AllOf[Union[Sequence[Union[DotMatchType, OneOf[DotMatchType], AllOf[DotMatchType], AtLeastN[Union[DotMatchType, OneOf[DotMatchType], AllOf[DotMatchType]]], AtMostN[Union[DotMatchType, OneOf[DotMatchType], AllOf[DotMatchType]]]]], MatchIfTrue[Sequence[cst.Dot]]]],
    ] = DoNotCare()
    # Optional parentheses around the imported names.
    lpar: Union[
        Optional["LeftParen"],
        MetadataMatchType,
        MatchIfTrue[Optional[cst.LeftParen]],
        DoNotCareSentinel,
        OneOf[Union[Optional["LeftParen"], MetadataMatchType, MatchIfTrue[Optional[cst.LeftParen]]]],
        AllOf[Union[Optional["LeftParen"], MetadataMatchType, MatchIfTrue[Optional[cst.LeftParen]]]],
    ] = DoNotCare()
    rpar: Union[
        Optional["RightParen"],
        MetadataMatchType,
        MatchIfTrue[Optional[cst.RightParen]],
        DoNotCareSentinel,
        OneOf[Union[Optional["RightParen"], MetadataMatchType, MatchIfTrue[Optional[cst.RightParen]]]],
        AllOf[Union[Optional["RightParen"], MetadataMatchType, MatchIfTrue[Optional[cst.RightParen]]]],
    ] = DoNotCare()
    semicolon: Union[SemicolonMatchType, DoNotCareSentinel, OneOf[SemicolonMatchType], AllOf[SemicolonMatchType]] = DoNotCare()
    whitespace_after_from: Union[SimpleWhitespaceMatchType, DoNotCareSentinel, OneOf[SimpleWhitespaceMatchType], AllOf[SimpleWhitespaceMatchType]] = DoNotCare()
    whitespace_before_import: Union[SimpleWhitespaceMatchType, DoNotCareSentinel, OneOf[SimpleWhitespaceMatchType], AllOf[SimpleWhitespaceMatchType]] = DoNotCare()
    whitespace_after_import: Union[SimpleWhitespaceMatchType, DoNotCareSentinel, OneOf[SimpleWhitespaceMatchType], AllOf[SimpleWhitespaceMatchType]] = DoNotCare()
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class ImportStar(BaseMatcherNode):
    """Matcher counterpart of ``cst.ImportStar``; only ``metadata`` is constrainable."""

    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class In(BaseCompOp, BaseMatcherNode):
    """Matcher counterpart of the ``in`` comparison operator node."""

    whitespace_before: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    whitespace_after: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
# Matchers accepted wherever a ``cst.BaseStatement`` is expected.
BaseStatementMatchType = Union["BaseStatement", MetadataMatchType, MatchIfTrue[cst.BaseStatement]]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class IndentedBlock(BaseSuite, BaseMatcherNode):
    """Matcher counterpart of ``cst.IndentedBlock``.

    Every field defaults to ``DoNotCare()``; supply a concrete matcher, a
    ``MatchIfTrue`` predicate, or a ``OneOf``/``AllOf`` combinator to
    constrain the corresponding field of the node being matched.
    """

    # Statements inside the block; AtLeastN/AtMostN allow wildcard-length runs.
    body: Union[
        Sequence[Union[BaseStatementMatchType, DoNotCareSentinel, OneOf[BaseStatementMatchType], AllOf[BaseStatementMatchType], AtLeastN[Union[BaseStatementMatchType, DoNotCareSentinel, OneOf[BaseStatementMatchType], AllOf[BaseStatementMatchType]]], AtMostN[Union[BaseStatementMatchType, DoNotCareSentinel, OneOf[BaseStatementMatchType], AllOf[BaseStatementMatchType]]]]],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.BaseStatement]],
        OneOf[Union[Sequence[Union[BaseStatementMatchType, OneOf[BaseStatementMatchType], AllOf[BaseStatementMatchType], AtLeastN[Union[BaseStatementMatchType, OneOf[BaseStatementMatchType], AllOf[BaseStatementMatchType]]], AtMostN[Union[BaseStatementMatchType, OneOf[BaseStatementMatchType], AllOf[BaseStatementMatchType]]]]], MatchIfTrue[Sequence[cst.BaseStatement]]]],
        AllOf[Union[Sequence[Union[BaseStatementMatchType, OneOf[BaseStatementMatchType], AllOf[BaseStatementMatchType], AtLeastN[Union[BaseStatementMatchType, OneOf[BaseStatementMatchType], AllOf[BaseStatementMatchType]]], AtMostN[Union[BaseStatementMatchType, OneOf[BaseStatementMatchType], AllOf[BaseStatementMatchType]]]]], MatchIfTrue[Sequence[cst.BaseStatement]]]],
    ] = DoNotCare()
    header: Union[TrailingWhitespaceMatchType, DoNotCareSentinel, OneOf[TrailingWhitespaceMatchType], AllOf[TrailingWhitespaceMatchType]] = DoNotCare()
    # ``indent`` is an optional raw string on the concrete node.
    indent: Union[
        Optional[str],
        MetadataMatchType,
        MatchIfTrue[Optional[str]],
        DoNotCareSentinel,
        OneOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]],
        AllOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]],
    ] = DoNotCare()
    footer: Union[
        Sequence[Union[EmptyLineMatchType, DoNotCareSentinel, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType], AtLeastN[Union[EmptyLineMatchType, DoNotCareSentinel, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType]]], AtMostN[Union[EmptyLineMatchType, DoNotCareSentinel, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType]]]]],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.EmptyLine]],
        OneOf[Union[Sequence[Union[EmptyLineMatchType, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType], AtLeastN[Union[EmptyLineMatchType, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType]]], AtMostN[Union[EmptyLineMatchType, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType]]]]], MatchIfTrue[Sequence[cst.EmptyLine]]]],
        AllOf[Union[Sequence[Union[EmptyLineMatchType, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType], AtLeastN[Union[EmptyLineMatchType, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType]]], AtMostN[Union[EmptyLineMatchType, OneOf[EmptyLineMatchType], AllOf[EmptyLineMatchType]]]]], MatchIfTrue[Sequence[cst.EmptyLine]]]],
    ] = DoNotCare()
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Index(BaseSlice, BaseMatcherNode):
    """Matcher counterpart of ``cst.Index``; fields default to ``DoNotCare()``."""

    value: Union[BaseExpressionMatchType, DoNotCareSentinel, OneOf[BaseExpressionMatchType], AllOf[BaseExpressionMatchType]] = DoNotCare()
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Integer(BaseExpression, BaseNumber, BaseMatcherNode):
    """Matcher counterpart of ``cst.Integer``.

    ``value`` matches the literal's source text; ``lpar``/``rpar`` match the
    surrounding parenthesis sequences. All fields default to ``DoNotCare()``.
    """

    value: Union[strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType]] = DoNotCare()
    lpar: Union[
        Sequence[Union[LeftParenMatchType, DoNotCareSentinel, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType], AtLeastN[Union[LeftParenMatchType, DoNotCareSentinel, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]], AtMostN[Union[LeftParenMatchType, DoNotCareSentinel, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]]]],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.LeftParen]],
        OneOf[Union[Sequence[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType], AtLeastN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]], AtMostN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]]]], MatchIfTrue[Sequence[cst.LeftParen]]]],
        AllOf[Union[Sequence[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType], AtLeastN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]], AtMostN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]]]], MatchIfTrue[Sequence[cst.LeftParen]]]],
    ] = DoNotCare()
    rpar: Union[
        Sequence[Union[RightParenMatchType, DoNotCareSentinel, OneOf[RightParenMatchType], AllOf[RightParenMatchType], AtLeastN[Union[RightParenMatchType, DoNotCareSentinel, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]], AtMostN[Union[RightParenMatchType, DoNotCareSentinel, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]]]],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.RightParen]],
        OneOf[Union[Sequence[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType], AtLeastN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]], AtMostN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]]]], MatchIfTrue[Sequence[cst.RightParen]]]],
        AllOf[Union[Sequence[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType], AtLeastN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]], AtMostN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]]]], MatchIfTrue[Sequence[cst.RightParen]]]],
    ] = DoNotCare()
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Is(BaseCompOp, BaseMatcherNode):
    """Matcher counterpart of the ``is`` comparison operator node."""

    whitespace_before: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    whitespace_after: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class IsNot(BaseCompOp, BaseMatcherNode):
    """Matcher counterpart of the ``is not`` comparison operator node.

    ``whitespace_between`` constrains the gap between the two keywords.
    """

    whitespace_before: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    whitespace_between: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    whitespace_after: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
# Matchers accepted wherever a ``cst.Colon`` is expected.
ColonMatchType = Union["Colon", MetadataMatchType, MatchIfTrue[cst.Colon]]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Lambda(BaseExpression, BaseMatcherNode):
    """Matcher counterpart of ``cst.Lambda``.

    Every field defaults to ``DoNotCare()``; supply a concrete matcher, a
    ``MatchIfTrue`` predicate, or a ``OneOf``/``AllOf`` combinator to
    constrain the corresponding field of the node being matched.
    """

    params: Union[ParametersMatchType, DoNotCareSentinel, OneOf[ParametersMatchType], AllOf[ParametersMatchType]] = DoNotCare()
    body: Union[BaseExpressionMatchType, DoNotCareSentinel, OneOf[BaseExpressionMatchType], AllOf[BaseExpressionMatchType]] = DoNotCare()
    colon: Union[ColonMatchType, DoNotCareSentinel, OneOf[ColonMatchType], AllOf[ColonMatchType]] = DoNotCare()
    lpar: Union[
        Sequence[Union[LeftParenMatchType, DoNotCareSentinel, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType], AtLeastN[Union[LeftParenMatchType, DoNotCareSentinel, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]], AtMostN[Union[LeftParenMatchType, DoNotCareSentinel, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]]]],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.LeftParen]],
        OneOf[Union[Sequence[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType], AtLeastN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]], AtMostN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]]]], MatchIfTrue[Sequence[cst.LeftParen]]]],
        AllOf[Union[Sequence[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType], AtLeastN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]], AtMostN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]]]], MatchIfTrue[Sequence[cst.LeftParen]]]],
    ] = DoNotCare()
    rpar: Union[
        Sequence[Union[RightParenMatchType, DoNotCareSentinel, OneOf[RightParenMatchType], AllOf[RightParenMatchType], AtLeastN[Union[RightParenMatchType, DoNotCareSentinel, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]], AtMostN[Union[RightParenMatchType, DoNotCareSentinel, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]]]],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.RightParen]],
        OneOf[Union[Sequence[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType], AtLeastN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]], AtMostN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]]]], MatchIfTrue[Sequence[cst.RightParen]]]],
        AllOf[Union[Sequence[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType], AtLeastN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]], AtMostN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]]]], MatchIfTrue[Sequence[cst.RightParen]]]],
    ] = DoNotCare()
    whitespace_after_lambda: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class LeftCurlyBrace(BaseMatcherNode):
    """Matcher counterpart of ``cst.LeftCurlyBrace``."""

    whitespace_after: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class LeftParen(BaseMatcherNode):
    """Matcher counterpart of ``cst.LeftParen``."""

    whitespace_after: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class LeftShift(BaseBinaryOp, BaseMatcherNode):
    """Matcher counterpart of the ``cst.LeftShift`` binary operator node."""

    whitespace_before: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    whitespace_after: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class LeftShiftAssign(BaseAugOp, BaseMatcherNode):
    """Matcher counterpart of the ``cst.LeftShiftAssign`` augmented-assignment operator."""

    whitespace_before: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    whitespace_after: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class LeftSquareBracket(BaseMatcherNode):
    """Matcher counterpart of ``cst.LeftSquareBracket``."""

    whitespace_after: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class LessThan(BaseCompOp, BaseMatcherNode):
    """Matcher counterpart of the ``cst.LessThan`` comparison operator node."""

    whitespace_before: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    whitespace_after: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class LessThanEqual(BaseCompOp, BaseMatcherNode):
    """Matcher counterpart of the ``cst.LessThanEqual`` comparison operator node."""

    whitespace_before: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    whitespace_after: Union[BaseParenthesizableWhitespaceMatchType, DoNotCareSentinel, OneOf[BaseParenthesizableWhitespaceMatchType], AllOf[BaseParenthesizableWhitespaceMatchType]] = DoNotCare()
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
# Matchers accepted wherever a ``cst.BaseElement`` is expected.
BaseElementMatchType = Union["BaseElement", MetadataMatchType, MatchIfTrue[cst.BaseElement]]
# Matchers accepted wherever a ``cst.LeftSquareBracket`` is expected.
LeftSquareBracketMatchType = Union["LeftSquareBracket", MetadataMatchType, MatchIfTrue[cst.LeftSquareBracket]]
# Matchers accepted wherever a ``cst.RightSquareBracket`` is expected.
RightSquareBracketMatchType = Union["RightSquareBracket", MetadataMatchType, MatchIfTrue[cst.RightSquareBracket]]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class List(
    BaseAssignTargetExpression,
    BaseDelTargetExpression,
    BaseExpression,
    BaseList,
    BaseMatcherNode,
):
    """Matcher counterpart of ``cst.List``.

    Every field defaults to ``DoNotCare()``; supply a concrete matcher, a
    ``MatchIfTrue`` predicate, or a ``OneOf``/``AllOf`` combinator to
    constrain the corresponding field of the node being matched.
    """

    # Element matchers; AtLeastN/AtMostN allow wildcard-length runs.
    elements: Union[
        Sequence[Union[BaseElementMatchType, DoNotCareSentinel, OneOf[BaseElementMatchType], AllOf[BaseElementMatchType], AtLeastN[Union[BaseElementMatchType, DoNotCareSentinel, OneOf[BaseElementMatchType], AllOf[BaseElementMatchType]]], AtMostN[Union[BaseElementMatchType, DoNotCareSentinel, OneOf[BaseElementMatchType], AllOf[BaseElementMatchType]]]]],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.BaseElement]],
        OneOf[Union[Sequence[Union[BaseElementMatchType, OneOf[BaseElementMatchType], AllOf[BaseElementMatchType], AtLeastN[Union[BaseElementMatchType, OneOf[BaseElementMatchType], AllOf[BaseElementMatchType]]], AtMostN[Union[BaseElementMatchType, OneOf[BaseElementMatchType], AllOf[BaseElementMatchType]]]]], MatchIfTrue[Sequence[cst.BaseElement]]]],
        AllOf[Union[Sequence[Union[BaseElementMatchType, OneOf[BaseElementMatchType], AllOf[BaseElementMatchType], AtLeastN[Union[BaseElementMatchType, OneOf[BaseElementMatchType], AllOf[BaseElementMatchType]]], AtMostN[Union[BaseElementMatchType, OneOf[BaseElementMatchType], AllOf[BaseElementMatchType]]]]], MatchIfTrue[Sequence[cst.BaseElement]]]],
    ] = DoNotCare()
    lbracket: Union[LeftSquareBracketMatchType, DoNotCareSentinel, OneOf[LeftSquareBracketMatchType], AllOf[LeftSquareBracketMatchType]] = DoNotCare()
    rbracket: Union[RightSquareBracketMatchType, DoNotCareSentinel, OneOf[RightSquareBracketMatchType], AllOf[RightSquareBracketMatchType]] = DoNotCare()
    lpar: Union[
        Sequence[Union[LeftParenMatchType, DoNotCareSentinel, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType], AtLeastN[Union[LeftParenMatchType, DoNotCareSentinel, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]], AtMostN[Union[LeftParenMatchType, DoNotCareSentinel, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]]]],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.LeftParen]],
        OneOf[Union[Sequence[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType], AtLeastN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]], AtMostN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]]]], MatchIfTrue[Sequence[cst.LeftParen]]]],
        AllOf[Union[Sequence[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType], AtLeastN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]], AtMostN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]]]], MatchIfTrue[Sequence[cst.LeftParen]]]],
    ] = DoNotCare()
    rpar: Union[
        Sequence[Union[RightParenMatchType, DoNotCareSentinel, OneOf[RightParenMatchType], AllOf[RightParenMatchType], AtLeastN[Union[RightParenMatchType, DoNotCareSentinel, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]], AtMostN[Union[RightParenMatchType, DoNotCareSentinel, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]]]],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.RightParen]],
        OneOf[Union[Sequence[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType], AtLeastN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]], AtMostN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]]]], MatchIfTrue[Sequence[cst.RightParen]]]],
        AllOf[Union[Sequence[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType], AtLeastN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]], AtMostN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]]]], MatchIfTrue[Sequence[cst.RightParen]]]],
    ] = DoNotCare()
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class ListComp(BaseComp, BaseExpression, BaseList, BaseSimpleComp, BaseMatcherNode):
    """Matcher counterpart of ``cst.ListComp``.

    Every field defaults to ``DoNotCare()``; supply a concrete matcher, a
    ``MatchIfTrue`` predicate, or a ``OneOf``/``AllOf`` combinator to
    constrain the corresponding field of the node being matched.
    """

    elt: Union[BaseAssignTargetExpressionMatchType, DoNotCareSentinel, OneOf[BaseAssignTargetExpressionMatchType], AllOf[BaseAssignTargetExpressionMatchType]] = DoNotCare()
    for_in: Union[CompForMatchType, DoNotCareSentinel, OneOf[CompForMatchType], AllOf[CompForMatchType]] = DoNotCare()
    lbracket: Union[LeftSquareBracketMatchType, DoNotCareSentinel, OneOf[LeftSquareBracketMatchType], AllOf[LeftSquareBracketMatchType]] = DoNotCare()
    rbracket: Union[RightSquareBracketMatchType, DoNotCareSentinel, OneOf[RightSquareBracketMatchType], AllOf[RightSquareBracketMatchType]] = DoNotCare()
    lpar: Union[
        Sequence[Union[LeftParenMatchType, DoNotCareSentinel, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType], AtLeastN[Union[LeftParenMatchType, DoNotCareSentinel, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]], AtMostN[Union[LeftParenMatchType, DoNotCareSentinel, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]]]],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.LeftParen]],
        OneOf[Union[Sequence[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType], AtLeastN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]], AtMostN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]]]], MatchIfTrue[Sequence[cst.LeftParen]]]],
        AllOf[Union[Sequence[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType], AtLeastN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]], AtMostN[Union[LeftParenMatchType, OneOf[LeftParenMatchType], AllOf[LeftParenMatchType]]]]], MatchIfTrue[Sequence[cst.LeftParen]]]],
    ] = DoNotCare()
    rpar: Union[
        Sequence[Union[RightParenMatchType, DoNotCareSentinel, OneOf[RightParenMatchType], AllOf[RightParenMatchType], AtLeastN[Union[RightParenMatchType, DoNotCareSentinel, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]], AtMostN[Union[RightParenMatchType, DoNotCareSentinel, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]]]],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.RightParen]],
        OneOf[Union[Sequence[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType], AtLeastN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]], AtMostN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]]]], MatchIfTrue[Sequence[cst.RightParen]]]],
        AllOf[Union[Sequence[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType], AtLeastN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]], AtMostN[Union[RightParenMatchType, OneOf[RightParenMatchType], AllOf[RightParenMatchType]]]]], MatchIfTrue[Sequence[cst.RightParen]]]],
    ] = DoNotCare()
    metadata: Union[MetadataMatchType, DoNotCareSentinel, OneOf[MetadataMatchType], AllOf[MetadataMatchType]] = DoNotCare()
# Matchers accepted wherever a ``cst.MatchCase`` is expected.
MatchCaseMatchType = Union["MatchCase", MetadataMatchType, MatchIfTrue[cst.MatchCase]]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Match(BaseCompoundStatement, BaseStatement, BaseMatcherNode):
    """Matcher for a ``cst.Match`` statement node (``match subject: ...``).

    Every field defaults to :func:`DoNotCare`, i.e. it matches any value of
    the corresponding concrete-node attribute.  Supply a matcher (optionally
    wrapped in ``OneOf``/``AllOf``/``MatchIfTrue``, or — for sequence fields
    such as ``cases`` — ``AtLeastN``/``AtMostN``) to constrain it.
    """

    # Expression being matched against (the ``match <subject>:`` part).
    subject: Union[
        BaseExpressionMatchType,
        DoNotCareSentinel,
        OneOf[BaseExpressionMatchType],
        AllOf[BaseExpressionMatchType],
    ] = DoNotCare()
    # Sequence of ``case`` clause matchers.
    cases: Union[
        Sequence[
            Union[
                MatchCaseMatchType,
                DoNotCareSentinel,
                OneOf[MatchCaseMatchType],
                AllOf[MatchCaseMatchType],
                AtLeastN[
                    Union[
                        MatchCaseMatchType,
                        DoNotCareSentinel,
                        OneOf[MatchCaseMatchType],
                        AllOf[MatchCaseMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        MatchCaseMatchType,
                        DoNotCareSentinel,
                        OneOf[MatchCaseMatchType],
                        AllOf[MatchCaseMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.MatchCase]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        MatchCaseMatchType,
                        OneOf[MatchCaseMatchType],
                        AllOf[MatchCaseMatchType],
                        AtLeastN[
                            Union[
                                MatchCaseMatchType,
                                OneOf[MatchCaseMatchType],
                                AllOf[MatchCaseMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                MatchCaseMatchType,
                                OneOf[MatchCaseMatchType],
                                AllOf[MatchCaseMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.MatchCase]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        MatchCaseMatchType,
                        OneOf[MatchCaseMatchType],
                        AllOf[MatchCaseMatchType],
                        AtLeastN[
                            Union[
                                MatchCaseMatchType,
                                OneOf[MatchCaseMatchType],
                                AllOf[MatchCaseMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                MatchCaseMatchType,
                                OneOf[MatchCaseMatchType],
                                AllOf[MatchCaseMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.MatchCase]],
            ]
        ],
    ] = DoNotCare()
    # Empty/comment lines preceding the ``match`` statement.
    leading_lines: Union[
        Sequence[
            Union[
                EmptyLineMatchType,
                DoNotCareSentinel,
                OneOf[EmptyLineMatchType],
                AllOf[EmptyLineMatchType],
                AtLeastN[
                    Union[
                        EmptyLineMatchType,
                        DoNotCareSentinel,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        EmptyLineMatchType,
                        DoNotCareSentinel,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.EmptyLine]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        EmptyLineMatchType,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                        AtLeastN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.EmptyLine]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        EmptyLineMatchType,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                        AtLeastN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.EmptyLine]],
            ]
        ],
    ] = DoNotCare()
    # Whitespace between the ``match`` keyword and the subject.
    whitespace_after_match: Union[
        SimpleWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[SimpleWhitespaceMatchType],
        AllOf[SimpleWhitespaceMatchType],
    ] = DoNotCare()
    # Whitespace between the subject and the colon.
    whitespace_before_colon: Union[
        SimpleWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[SimpleWhitespaceMatchType],
        AllOf[SimpleWhitespaceMatchType],
    ] = DoNotCare()
    # Trailing whitespace/comment after the colon.
    whitespace_after_colon: Union[
        TrailingWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[TrailingWhitespaceMatchType],
        AllOf[TrailingWhitespaceMatchType],
    ] = DoNotCare()
    # Indentation string of the match body (None when unspecified).
    indent: Union[
        Optional[str],
        MetadataMatchType,
        MatchIfTrue[Optional[str]],
        DoNotCareSentinel,
        OneOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]],
        AllOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]],
    ] = DoNotCare()
    # Empty/comment lines following the last case clause.
    footer: Union[
        Sequence[
            Union[
                EmptyLineMatchType,
                DoNotCareSentinel,
                OneOf[EmptyLineMatchType],
                AllOf[EmptyLineMatchType],
                AtLeastN[
                    Union[
                        EmptyLineMatchType,
                        DoNotCareSentinel,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        EmptyLineMatchType,
                        DoNotCareSentinel,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.EmptyLine]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        EmptyLineMatchType,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                        AtLeastN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.EmptyLine]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        EmptyLineMatchType,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                        AtLeastN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.EmptyLine]],
            ]
        ],
    ] = DoNotCare()
    # Metadata constraints attached to this node.
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class MatchAs(BaseMatcherNode):
    """Matcher for a ``cst.MatchAs`` pattern (``<pattern> as <name>``).

    Every field defaults to :func:`DoNotCare` (matches anything); supply a
    matcher or combinator (``OneOf``/``AllOf``/``MatchIfTrue``) to constrain
    the corresponding attribute of the concrete node.
    """

    # Sub-pattern being bound; ``None`` matches a bare capture pattern.
    pattern: Union[
        Optional["MatchPattern"],
        MetadataMatchType,
        MatchIfTrue[Optional[cst.MatchPattern]],
        DoNotCareSentinel,
        OneOf[
            Union[
                Optional["MatchPattern"],
                MetadataMatchType,
                MatchIfTrue[Optional[cst.MatchPattern]],
            ]
        ],
        AllOf[
            Union[
                Optional["MatchPattern"],
                MetadataMatchType,
                MatchIfTrue[Optional[cst.MatchPattern]],
            ]
        ],
    ] = DoNotCare()
    # Name the pattern is bound to (``None`` for the wildcard ``_`` case).
    name: Union[
        Optional["Name"],
        MetadataMatchType,
        MatchIfTrue[Optional[cst.Name]],
        DoNotCareSentinel,
        OneOf[
            Union[Optional["Name"], MetadataMatchType, MatchIfTrue[Optional[cst.Name]]]
        ],
        AllOf[
            Union[Optional["Name"], MetadataMatchType, MatchIfTrue[Optional[cst.Name]]]
        ],
    ] = DoNotCare()
    # Whitespace surrounding the ``as`` keyword.
    whitespace_before_as: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    whitespace_after_as: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    # Left parentheses wrapping the pattern.
    lpar: Union[
        Sequence[
            Union[
                LeftParenMatchType,
                DoNotCareSentinel,
                OneOf[LeftParenMatchType],
                AllOf[LeftParenMatchType],
                AtLeastN[
                    Union[
                        LeftParenMatchType,
                        DoNotCareSentinel,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        LeftParenMatchType,
                        DoNotCareSentinel,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.LeftParen]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        LeftParenMatchType,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                        AtLeastN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.LeftParen]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        LeftParenMatchType,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                        AtLeastN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.LeftParen]],
            ]
        ],
    ] = DoNotCare()
    # Right parentheses wrapping the pattern.
    rpar: Union[
        Sequence[
            Union[
                RightParenMatchType,
                DoNotCareSentinel,
                OneOf[RightParenMatchType],
                AllOf[RightParenMatchType],
                AtLeastN[
                    Union[
                        RightParenMatchType,
                        DoNotCareSentinel,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        RightParenMatchType,
                        DoNotCareSentinel,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.RightParen]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        RightParenMatchType,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                        AtLeastN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.RightParen]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        RightParenMatchType,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                        AtLeastN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.RightParen]],
            ]
        ],
    ] = DoNotCare()
    # Metadata constraints attached to this node.
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
# Alias for anything usable where a ``cst.MatchPattern`` matcher is expected.
MatchPatternMatchType = Union[
    "MatchPattern", MetadataMatchType, MatchIfTrue[cst.MatchPattern]
]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class MatchCase(BaseMatcherNode):
    """Matcher for a ``cst.MatchCase`` node (one ``case <pattern> [if guard]:``).

    Every field defaults to :func:`DoNotCare` (matches anything); supply a
    matcher or combinator to constrain the corresponding attribute.
    """

    # Pattern tested by this case clause.
    pattern: Union[
        MatchPatternMatchType,
        DoNotCareSentinel,
        OneOf[MatchPatternMatchType],
        AllOf[MatchPatternMatchType],
    ] = DoNotCare()
    # Suite executed when the pattern matches.
    body: Union[
        BaseSuiteMatchType,
        DoNotCareSentinel,
        OneOf[BaseSuiteMatchType],
        AllOf[BaseSuiteMatchType],
    ] = DoNotCare()
    # Optional ``if`` guard expression (``None`` when absent).
    guard: Union[
        Optional["BaseExpression"],
        MetadataMatchType,
        MatchIfTrue[Optional[cst.BaseExpression]],
        DoNotCareSentinel,
        OneOf[
            Union[
                Optional["BaseExpression"],
                MetadataMatchType,
                MatchIfTrue[Optional[cst.BaseExpression]],
            ]
        ],
        AllOf[
            Union[
                Optional["BaseExpression"],
                MetadataMatchType,
                MatchIfTrue[Optional[cst.BaseExpression]],
            ]
        ],
    ] = DoNotCare()
    # Empty/comment lines preceding the ``case`` clause.
    leading_lines: Union[
        Sequence[
            Union[
                EmptyLineMatchType,
                DoNotCareSentinel,
                OneOf[EmptyLineMatchType],
                AllOf[EmptyLineMatchType],
                AtLeastN[
                    Union[
                        EmptyLineMatchType,
                        DoNotCareSentinel,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        EmptyLineMatchType,
                        DoNotCareSentinel,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.EmptyLine]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        EmptyLineMatchType,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                        AtLeastN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.EmptyLine]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        EmptyLineMatchType,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                        AtLeastN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.EmptyLine]],
            ]
        ],
    ] = DoNotCare()
    # Whitespace details around the ``case``/``if`` keywords and colon.
    whitespace_after_case: Union[
        SimpleWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[SimpleWhitespaceMatchType],
        AllOf[SimpleWhitespaceMatchType],
    ] = DoNotCare()
    whitespace_before_if: Union[
        SimpleWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[SimpleWhitespaceMatchType],
        AllOf[SimpleWhitespaceMatchType],
    ] = DoNotCare()
    whitespace_after_if: Union[
        SimpleWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[SimpleWhitespaceMatchType],
        AllOf[SimpleWhitespaceMatchType],
    ] = DoNotCare()
    whitespace_before_colon: Union[
        SimpleWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[SimpleWhitespaceMatchType],
        AllOf[SimpleWhitespaceMatchType],
    ] = DoNotCare()
    # Metadata constraints attached to this node.
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
# Aliases for anything usable where matchers for ``cst.MatchSequenceElement``
# and ``cst.MatchKeywordElement`` are expected, respectively.
MatchSequenceElementMatchType = Union[
    "MatchSequenceElement", MetadataMatchType, MatchIfTrue[cst.MatchSequenceElement]
]
MatchKeywordElementMatchType = Union[
    "MatchKeywordElement", MetadataMatchType, MatchIfTrue[cst.MatchKeywordElement]
]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class MatchClass(BaseMatcherNode):
    """Matcher for a ``cst.MatchClass`` pattern (``Cls(p1, ..., kw=p)``).

    Every field defaults to :func:`DoNotCare` (matches anything); supply a
    matcher or combinator to constrain the corresponding attribute.
    """

    # Class name/expression being matched against.
    cls: Union[
        BaseExpressionMatchType,
        DoNotCareSentinel,
        OneOf[BaseExpressionMatchType],
        AllOf[BaseExpressionMatchType],
    ] = DoNotCare()
    # Positional sub-patterns.
    patterns: Union[
        Sequence[
            Union[
                MatchSequenceElementMatchType,
                DoNotCareSentinel,
                OneOf[MatchSequenceElementMatchType],
                AllOf[MatchSequenceElementMatchType],
                AtLeastN[
                    Union[
                        MatchSequenceElementMatchType,
                        DoNotCareSentinel,
                        OneOf[MatchSequenceElementMatchType],
                        AllOf[MatchSequenceElementMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        MatchSequenceElementMatchType,
                        DoNotCareSentinel,
                        OneOf[MatchSequenceElementMatchType],
                        AllOf[MatchSequenceElementMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.MatchSequenceElement]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        MatchSequenceElementMatchType,
                        OneOf[MatchSequenceElementMatchType],
                        AllOf[MatchSequenceElementMatchType],
                        AtLeastN[
                            Union[
                                MatchSequenceElementMatchType,
                                OneOf[MatchSequenceElementMatchType],
                                AllOf[MatchSequenceElementMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                MatchSequenceElementMatchType,
                                OneOf[MatchSequenceElementMatchType],
                                AllOf[MatchSequenceElementMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.MatchSequenceElement]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        MatchSequenceElementMatchType,
                        OneOf[MatchSequenceElementMatchType],
                        AllOf[MatchSequenceElementMatchType],
                        AtLeastN[
                            Union[
                                MatchSequenceElementMatchType,
                                OneOf[MatchSequenceElementMatchType],
                                AllOf[MatchSequenceElementMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                MatchSequenceElementMatchType,
                                OneOf[MatchSequenceElementMatchType],
                                AllOf[MatchSequenceElementMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.MatchSequenceElement]],
            ]
        ],
    ] = DoNotCare()
    # Keyword sub-patterns (``name=pattern``).
    kwds: Union[
        Sequence[
            Union[
                MatchKeywordElementMatchType,
                DoNotCareSentinel,
                OneOf[MatchKeywordElementMatchType],
                AllOf[MatchKeywordElementMatchType],
                AtLeastN[
                    Union[
                        MatchKeywordElementMatchType,
                        DoNotCareSentinel,
                        OneOf[MatchKeywordElementMatchType],
                        AllOf[MatchKeywordElementMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        MatchKeywordElementMatchType,
                        DoNotCareSentinel,
                        OneOf[MatchKeywordElementMatchType],
                        AllOf[MatchKeywordElementMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.MatchKeywordElement]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        MatchKeywordElementMatchType,
                        OneOf[MatchKeywordElementMatchType],
                        AllOf[MatchKeywordElementMatchType],
                        AtLeastN[
                            Union[
                                MatchKeywordElementMatchType,
                                OneOf[MatchKeywordElementMatchType],
                                AllOf[MatchKeywordElementMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                MatchKeywordElementMatchType,
                                OneOf[MatchKeywordElementMatchType],
                                AllOf[MatchKeywordElementMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.MatchKeywordElement]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        MatchKeywordElementMatchType,
                        OneOf[MatchKeywordElementMatchType],
                        AllOf[MatchKeywordElementMatchType],
                        AtLeastN[
                            Union[
                                MatchKeywordElementMatchType,
                                OneOf[MatchKeywordElementMatchType],
                                AllOf[MatchKeywordElementMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                MatchKeywordElementMatchType,
                                OneOf[MatchKeywordElementMatchType],
                                AllOf[MatchKeywordElementMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.MatchKeywordElement]],
            ]
        ],
    ] = DoNotCare()
    # Whitespace details around the class name and argument lists.
    whitespace_after_cls: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    whitespace_before_patterns: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    whitespace_after_kwds: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    # Left parentheses wrapping the pattern.
    lpar: Union[
        Sequence[
            Union[
                LeftParenMatchType,
                DoNotCareSentinel,
                OneOf[LeftParenMatchType],
                AllOf[LeftParenMatchType],
                AtLeastN[
                    Union[
                        LeftParenMatchType,
                        DoNotCareSentinel,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        LeftParenMatchType,
                        DoNotCareSentinel,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.LeftParen]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        LeftParenMatchType,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                        AtLeastN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.LeftParen]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        LeftParenMatchType,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                        AtLeastN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.LeftParen]],
            ]
        ],
    ] = DoNotCare()
    # Right parentheses wrapping the pattern.
    rpar: Union[
        Sequence[
            Union[
                RightParenMatchType,
                DoNotCareSentinel,
                OneOf[RightParenMatchType],
                AllOf[RightParenMatchType],
                AtLeastN[
                    Union[
                        RightParenMatchType,
                        DoNotCareSentinel,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        RightParenMatchType,
                        DoNotCareSentinel,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.RightParen]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        RightParenMatchType,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                        AtLeastN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.RightParen]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        RightParenMatchType,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                        AtLeastN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.RightParen]],
            ]
        ],
    ] = DoNotCare()
    # Metadata constraints attached to this node.
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class MatchKeywordElement(BaseMatcherNode):
    """Matcher for a ``cst.MatchKeywordElement`` (``key=pattern`` in a class
    pattern).  Every field defaults to :func:`DoNotCare`.
    """

    # Keyword name on the left of ``=``.
    key: Union[
        NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType]
    ] = DoNotCare()
    # Sub-pattern on the right of ``=``.
    pattern: Union[
        MatchPatternMatchType,
        DoNotCareSentinel,
        OneOf[MatchPatternMatchType],
        AllOf[MatchPatternMatchType],
    ] = DoNotCare()
    # Trailing comma separating this element from the next.
    comma: Union[
        CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType]
    ] = DoNotCare()
    # Whitespace surrounding the ``=`` sign.
    whitespace_before_equal: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    whitespace_after_equal: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    # Metadata constraints attached to this node.
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
# Alias for anything usable where either a ``cst.MatchSequenceElement`` or a
# ``cst.MatchStar`` matcher is expected (elements of list/tuple patterns).
MatchSequenceElementOrMatchStarMatchType = Union[
    "MatchSequenceElement",
    "MatchStar",
    MetadataMatchType,
    MatchIfTrue[Union[cst.MatchSequenceElement, cst.MatchStar]],
]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class MatchList(BaseMatcherNode):
    """Matcher for a ``cst.MatchList`` pattern (``[p1, p2, *rest]``).

    Every field defaults to :func:`DoNotCare` (matches anything); supply a
    matcher or combinator to constrain the corresponding attribute.
    """

    # Element patterns, each a sequence element or a starred element.
    patterns: Union[
        Sequence[
            Union[
                MatchSequenceElementOrMatchStarMatchType,
                DoNotCareSentinel,
                OneOf[MatchSequenceElementOrMatchStarMatchType],
                AllOf[MatchSequenceElementOrMatchStarMatchType],
                AtLeastN[
                    Union[
                        MatchSequenceElementOrMatchStarMatchType,
                        DoNotCareSentinel,
                        OneOf[MatchSequenceElementOrMatchStarMatchType],
                        AllOf[MatchSequenceElementOrMatchStarMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        MatchSequenceElementOrMatchStarMatchType,
                        DoNotCareSentinel,
                        OneOf[MatchSequenceElementOrMatchStarMatchType],
                        AllOf[MatchSequenceElementOrMatchStarMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[
            Sequence[
                Union[
                    cst.MatchSequenceElement,
                    cst.MatchStar,
                    OneOf[Union[cst.MatchSequenceElement, cst.MatchStar]],
                    AllOf[Union[cst.MatchSequenceElement, cst.MatchStar]],
                ]
            ]
        ],
        OneOf[
            Union[
                Sequence[
                    Union[
                        MatchSequenceElementOrMatchStarMatchType,
                        OneOf[MatchSequenceElementOrMatchStarMatchType],
                        AllOf[MatchSequenceElementOrMatchStarMatchType],
                        AtLeastN[
                            Union[
                                MatchSequenceElementOrMatchStarMatchType,
                                OneOf[MatchSequenceElementOrMatchStarMatchType],
                                AllOf[MatchSequenceElementOrMatchStarMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                MatchSequenceElementOrMatchStarMatchType,
                                OneOf[MatchSequenceElementOrMatchStarMatchType],
                                AllOf[MatchSequenceElementOrMatchStarMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[
                    Sequence[
                        Union[
                            cst.MatchSequenceElement,
                            cst.MatchStar,
                            OneOf[Union[cst.MatchSequenceElement, cst.MatchStar]],
                            AllOf[Union[cst.MatchSequenceElement, cst.MatchStar]],
                        ]
                    ]
                ],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        MatchSequenceElementOrMatchStarMatchType,
                        OneOf[MatchSequenceElementOrMatchStarMatchType],
                        AllOf[MatchSequenceElementOrMatchStarMatchType],
                        AtLeastN[
                            Union[
                                MatchSequenceElementOrMatchStarMatchType,
                                OneOf[MatchSequenceElementOrMatchStarMatchType],
                                AllOf[MatchSequenceElementOrMatchStarMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                MatchSequenceElementOrMatchStarMatchType,
                                OneOf[MatchSequenceElementOrMatchStarMatchType],
                                AllOf[MatchSequenceElementOrMatchStarMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[
                    Sequence[
                        Union[
                            cst.MatchSequenceElement,
                            cst.MatchStar,
                            OneOf[Union[cst.MatchSequenceElement, cst.MatchStar]],
                            AllOf[Union[cst.MatchSequenceElement, cst.MatchStar]],
                        ]
                    ]
                ],
            ]
        ],
    ] = DoNotCare()
    # Surrounding square brackets (``None`` when the pattern is unbracketed).
    lbracket: Union[
        Optional["LeftSquareBracket"],
        MetadataMatchType,
        MatchIfTrue[Optional[cst.LeftSquareBracket]],
        DoNotCareSentinel,
        OneOf[
            Union[
                Optional["LeftSquareBracket"],
                MetadataMatchType,
                MatchIfTrue[Optional[cst.LeftSquareBracket]],
            ]
        ],
        AllOf[
            Union[
                Optional["LeftSquareBracket"],
                MetadataMatchType,
                MatchIfTrue[Optional[cst.LeftSquareBracket]],
            ]
        ],
    ] = DoNotCare()
    rbracket: Union[
        Optional["RightSquareBracket"],
        MetadataMatchType,
        MatchIfTrue[Optional[cst.RightSquareBracket]],
        DoNotCareSentinel,
        OneOf[
            Union[
                Optional["RightSquareBracket"],
                MetadataMatchType,
                MatchIfTrue[Optional[cst.RightSquareBracket]],
            ]
        ],
        AllOf[
            Union[
                Optional["RightSquareBracket"],
                MetadataMatchType,
                MatchIfTrue[Optional[cst.RightSquareBracket]],
            ]
        ],
    ] = DoNotCare()
    # Left parentheses wrapping the pattern.
    lpar: Union[
        Sequence[
            Union[
                LeftParenMatchType,
                DoNotCareSentinel,
                OneOf[LeftParenMatchType],
                AllOf[LeftParenMatchType],
                AtLeastN[
                    Union[
                        LeftParenMatchType,
                        DoNotCareSentinel,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        LeftParenMatchType,
                        DoNotCareSentinel,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.LeftParen]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        LeftParenMatchType,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                        AtLeastN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.LeftParen]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        LeftParenMatchType,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                        AtLeastN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.LeftParen]],
            ]
        ],
    ] = DoNotCare()
    # Right parentheses wrapping the pattern.
    rpar: Union[
        Sequence[
            Union[
                RightParenMatchType,
                DoNotCareSentinel,
                OneOf[RightParenMatchType],
                AllOf[RightParenMatchType],
                AtLeastN[
                    Union[
                        RightParenMatchType,
                        DoNotCareSentinel,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        RightParenMatchType,
                        DoNotCareSentinel,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.RightParen]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        RightParenMatchType,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                        AtLeastN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.RightParen]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        RightParenMatchType,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                        AtLeastN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.RightParen]],
            ]
        ],
    ] = DoNotCare()
    # Metadata constraints attached to this node.
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
# Alias for anything usable where a ``cst.MatchMappingElement`` matcher is expected.
MatchMappingElementMatchType = Union[
    "MatchMappingElement", MetadataMatchType, MatchIfTrue[cst.MatchMappingElement]
]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class MatchMapping(BaseMatcherNode):
    """Matcher for a ``cst.MatchMapping`` pattern (``{key: p, **rest}``).

    Every field defaults to :func:`DoNotCare` (matches anything); supply a
    matcher or combinator to constrain the corresponding attribute.
    """

    # ``key: pattern`` element matchers.
    elements: Union[
        Sequence[
            Union[
                MatchMappingElementMatchType,
                DoNotCareSentinel,
                OneOf[MatchMappingElementMatchType],
                AllOf[MatchMappingElementMatchType],
                AtLeastN[
                    Union[
                        MatchMappingElementMatchType,
                        DoNotCareSentinel,
                        OneOf[MatchMappingElementMatchType],
                        AllOf[MatchMappingElementMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        MatchMappingElementMatchType,
                        DoNotCareSentinel,
                        OneOf[MatchMappingElementMatchType],
                        AllOf[MatchMappingElementMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.MatchMappingElement]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        MatchMappingElementMatchType,
                        OneOf[MatchMappingElementMatchType],
                        AllOf[MatchMappingElementMatchType],
                        AtLeastN[
                            Union[
                                MatchMappingElementMatchType,
                                OneOf[MatchMappingElementMatchType],
                                AllOf[MatchMappingElementMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                MatchMappingElementMatchType,
                                OneOf[MatchMappingElementMatchType],
                                AllOf[MatchMappingElementMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.MatchMappingElement]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        MatchMappingElementMatchType,
                        OneOf[MatchMappingElementMatchType],
                        AllOf[MatchMappingElementMatchType],
                        AtLeastN[
                            Union[
                                MatchMappingElementMatchType,
                                OneOf[MatchMappingElementMatchType],
                                AllOf[MatchMappingElementMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                MatchMappingElementMatchType,
                                OneOf[MatchMappingElementMatchType],
                                AllOf[MatchMappingElementMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.MatchMappingElement]],
            ]
        ],
    ] = DoNotCare()
    # Surrounding curly braces.
    lbrace: Union[
        LeftCurlyBraceMatchType,
        DoNotCareSentinel,
        OneOf[LeftCurlyBraceMatchType],
        AllOf[LeftCurlyBraceMatchType],
    ] = DoNotCare()
    rbrace: Union[
        RightCurlyBraceMatchType,
        DoNotCareSentinel,
        OneOf[RightCurlyBraceMatchType],
        AllOf[RightCurlyBraceMatchType],
    ] = DoNotCare()
    # Name capturing the remaining mapping (``**rest``); ``None`` when absent.
    rest: Union[
        Optional["Name"],
        MetadataMatchType,
        MatchIfTrue[Optional[cst.Name]],
        DoNotCareSentinel,
        OneOf[
            Union[Optional["Name"], MetadataMatchType, MatchIfTrue[Optional[cst.Name]]]
        ],
        AllOf[
            Union[Optional["Name"], MetadataMatchType, MatchIfTrue[Optional[cst.Name]]]
        ],
    ] = DoNotCare()
    # Whitespace before the ``**rest`` capture.
    whitespace_before_rest: Union[
        SimpleWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[SimpleWhitespaceMatchType],
        AllOf[SimpleWhitespaceMatchType],
    ] = DoNotCare()
    # Optional trailing comma after the last element; ``None`` when absent.
    trailing_comma: Union[
        Optional["Comma"],
        MetadataMatchType,
        MatchIfTrue[Optional[cst.Comma]],
        DoNotCareSentinel,
        OneOf[
            Union[
                Optional["Comma"], MetadataMatchType, MatchIfTrue[Optional[cst.Comma]]
            ]
        ],
        AllOf[
            Union[
                Optional["Comma"], MetadataMatchType, MatchIfTrue[Optional[cst.Comma]]
            ]
        ],
    ] = DoNotCare()
    # Left parentheses wrapping the pattern.
    lpar: Union[
        Sequence[
            Union[
                LeftParenMatchType,
                DoNotCareSentinel,
                OneOf[LeftParenMatchType],
                AllOf[LeftParenMatchType],
                AtLeastN[
                    Union[
                        LeftParenMatchType,
                        DoNotCareSentinel,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        LeftParenMatchType,
                        DoNotCareSentinel,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.LeftParen]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        LeftParenMatchType,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                        AtLeastN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.LeftParen]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        LeftParenMatchType,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                        AtLeastN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.LeftParen]],
            ]
        ],
    ] = DoNotCare()
    # Right parentheses wrapping the pattern.
    rpar: Union[
        Sequence[
            Union[
                RightParenMatchType,
                DoNotCareSentinel,
                OneOf[RightParenMatchType],
                AllOf[RightParenMatchType],
                AtLeastN[
                    Union[
                        RightParenMatchType,
                        DoNotCareSentinel,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        RightParenMatchType,
                        DoNotCareSentinel,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.RightParen]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        RightParenMatchType,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                        AtLeastN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.RightParen]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        RightParenMatchType,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                        AtLeastN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.RightParen]],
            ]
        ],
    ] = DoNotCare()
    # Metadata constraints attached to this node.
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class MatchMappingElement(BaseMatcherNode):
    """Matcher for a ``cst.MatchMappingElement`` (one ``key: pattern`` pair in
    a mapping pattern).  Every field defaults to :func:`DoNotCare`.
    """

    # Key expression on the left of the colon.
    key: Union[
        BaseExpressionMatchType,
        DoNotCareSentinel,
        OneOf[BaseExpressionMatchType],
        AllOf[BaseExpressionMatchType],
    ] = DoNotCare()
    # Sub-pattern on the right of the colon.
    pattern: Union[
        MatchPatternMatchType,
        DoNotCareSentinel,
        OneOf[MatchPatternMatchType],
        AllOf[MatchPatternMatchType],
    ] = DoNotCare()
    # Trailing comma separating this element from the next.
    comma: Union[
        CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType]
    ] = DoNotCare()
    # Whitespace surrounding the colon.
    whitespace_before_colon: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    whitespace_after_colon: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    # Metadata constraints attached to this node.
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
# Alias for anything usable where a ``cst.MatchOrElement`` matcher is expected.
MatchOrElementMatchType = Union[
    "MatchOrElement", MetadataMatchType, MatchIfTrue[cst.MatchOrElement]
]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class MatchOr(BaseMatcherNode):
    """Matcher for a ``cst.MatchOr`` pattern (``p1 | p2 | p3``).

    Every field defaults to :func:`DoNotCare` (matches anything); supply a
    matcher or combinator to constrain the corresponding attribute.
    """

    # Alternative patterns separated by ``|``.
    patterns: Union[
        Sequence[
            Union[
                MatchOrElementMatchType,
                DoNotCareSentinel,
                OneOf[MatchOrElementMatchType],
                AllOf[MatchOrElementMatchType],
                AtLeastN[
                    Union[
                        MatchOrElementMatchType,
                        DoNotCareSentinel,
                        OneOf[MatchOrElementMatchType],
                        AllOf[MatchOrElementMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        MatchOrElementMatchType,
                        DoNotCareSentinel,
                        OneOf[MatchOrElementMatchType],
                        AllOf[MatchOrElementMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.MatchOrElement]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        MatchOrElementMatchType,
                        OneOf[MatchOrElementMatchType],
                        AllOf[MatchOrElementMatchType],
                        AtLeastN[
                            Union[
                                MatchOrElementMatchType,
                                OneOf[MatchOrElementMatchType],
                                AllOf[MatchOrElementMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                MatchOrElementMatchType,
                                OneOf[MatchOrElementMatchType],
                                AllOf[MatchOrElementMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.MatchOrElement]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        MatchOrElementMatchType,
                        OneOf[MatchOrElementMatchType],
                        AllOf[MatchOrElementMatchType],
                        AtLeastN[
                            Union[
                                MatchOrElementMatchType,
                                OneOf[MatchOrElementMatchType],
                                AllOf[MatchOrElementMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                MatchOrElementMatchType,
                                OneOf[MatchOrElementMatchType],
                                AllOf[MatchOrElementMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.MatchOrElement]],
            ]
        ],
    ] = DoNotCare()
    # Left parentheses wrapping the pattern.
    lpar: Union[
        Sequence[
            Union[
                LeftParenMatchType,
                DoNotCareSentinel,
                OneOf[LeftParenMatchType],
                AllOf[LeftParenMatchType],
                AtLeastN[
                    Union[
                        LeftParenMatchType,
                        DoNotCareSentinel,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        LeftParenMatchType,
                        DoNotCareSentinel,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.LeftParen]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        LeftParenMatchType,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                        AtLeastN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.LeftParen]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        LeftParenMatchType,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                        AtLeastN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.LeftParen]],
            ]
        ],
    ] = DoNotCare()
    # Right parentheses wrapping the pattern.
    rpar: Union[
        Sequence[
            Union[
                RightParenMatchType,
                DoNotCareSentinel,
                OneOf[RightParenMatchType],
                AllOf[RightParenMatchType],
                AtLeastN[
                    Union[
                        RightParenMatchType,
                        DoNotCareSentinel,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        RightParenMatchType,
                        DoNotCareSentinel,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.RightParen]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        RightParenMatchType,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                        AtLeastN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.RightParen]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        RightParenMatchType,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                        AtLeastN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.RightParen]],
            ]
        ],
    ] = DoNotCare()
    # Metadata constraints attached to this node.
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
# Alias for anything usable where a ``cst.BitOr`` matcher is expected
# (the ``|`` separator between or-pattern alternatives).
BitOrMatchType = Union["BitOr", MetadataMatchType, MatchIfTrue[cst.BitOr]]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class MatchOrElement(BaseMatcherNode):
    """Generated matcher for ``cst.MatchOrElement`` nodes.

    Every field accepts a concrete matcher, the OneOf/AllOf combinators, or
    ``DoNotCare()`` (the default) to ignore that field entirely.
    """

    # Pattern on one side of the ``|`` in a match-or pattern.
    pattern: Union[
        MatchPatternMatchType,
        DoNotCareSentinel,
        OneOf[MatchPatternMatchType],
        AllOf[MatchPatternMatchType],
    ] = DoNotCare()
    # The ``|`` token separating alternatives.
    separator: Union[
        BitOrMatchType, DoNotCareSentinel, OneOf[BitOrMatchType], AllOf[BitOrMatchType]
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class MatchPattern(BaseMatcherNode):
    """Generated matcher for ``cst.MatchPattern`` nodes; only metadata is matchable."""

    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class MatchSequence(BaseMatcherNode):
    """Generated matcher for ``cst.MatchSequence`` nodes; only metadata is matchable."""

    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class MatchSequenceElement(BaseMatcherNode):
    """Generated matcher for ``cst.MatchSequenceElement`` nodes.

    Fields accept a concrete matcher, OneOf/AllOf combinators, or the
    default ``DoNotCare()`` to ignore the field.
    """

    # The pattern this sequence element holds.
    value: Union[
        MatchPatternMatchType,
        DoNotCareSentinel,
        OneOf[MatchPatternMatchType],
        AllOf[MatchPatternMatchType],
    ] = DoNotCare()
    # Optional trailing comma token.
    comma: Union[
        CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType]
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class MatchSingleton(BaseMatcherNode):
    """Generated matcher for ``cst.MatchSingleton`` nodes.

    ``value`` is matched as a Name node (the singleton literal, e.g. True/None
    — presumably; confirm against libcst's CST definition).
    """

    value: Union[
        NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType]
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class MatchStar(BaseMatcherNode):
    """Generated matcher for ``cst.MatchStar`` nodes (``*name`` in a sequence pattern).

    ``name`` may be None in the underlying CST (anonymous ``*_``), hence the
    Optional-based union here.
    """

    # Captured name after the star; None-capable, so Optional matchers apply.
    name: Union[
        Optional["Name"],
        MetadataMatchType,
        MatchIfTrue[Optional[cst.Name]],
        DoNotCareSentinel,
        OneOf[
            Union[Optional["Name"], MetadataMatchType, MatchIfTrue[Optional[cst.Name]]]
        ],
        AllOf[
            Union[Optional["Name"], MetadataMatchType, MatchIfTrue[Optional[cst.Name]]]
        ],
    ] = DoNotCare()
    # Optional trailing comma token.
    comma: Union[
        CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType]
    ] = DoNotCare()
    whitespace_before_name: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class MatchTuple(BaseMatcherNode):
    """Generated matcher for ``cst.MatchTuple`` nodes.

    ``patterns`` accepts a sequence of element matchers (optionally wrapped in
    OneOf/AllOf/AtLeastN/AtMostN), a MatchIfTrue predicate over the raw
    sequence, or combinators over whole sequences.  ``lpar``/``rpar`` match
    the surrounding parenthesis token sequences the same way.
    """

    # Sequence of MatchSequenceElement / MatchStar matchers for the tuple body.
    patterns: Union[
        Sequence[
            Union[
                MatchSequenceElementOrMatchStarMatchType,
                DoNotCareSentinel,
                OneOf[MatchSequenceElementOrMatchStarMatchType],
                AllOf[MatchSequenceElementOrMatchStarMatchType],
                AtLeastN[
                    Union[
                        MatchSequenceElementOrMatchStarMatchType,
                        DoNotCareSentinel,
                        OneOf[MatchSequenceElementOrMatchStarMatchType],
                        AllOf[MatchSequenceElementOrMatchStarMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        MatchSequenceElementOrMatchStarMatchType,
                        DoNotCareSentinel,
                        OneOf[MatchSequenceElementOrMatchStarMatchType],
                        AllOf[MatchSequenceElementOrMatchStarMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[
            Sequence[
                Union[
                    cst.MatchSequenceElement,
                    cst.MatchStar,
                    OneOf[Union[cst.MatchSequenceElement, cst.MatchStar]],
                    AllOf[Union[cst.MatchSequenceElement, cst.MatchStar]],
                ]
            ]
        ],
        OneOf[
            Union[
                Sequence[
                    Union[
                        MatchSequenceElementOrMatchStarMatchType,
                        OneOf[MatchSequenceElementOrMatchStarMatchType],
                        AllOf[MatchSequenceElementOrMatchStarMatchType],
                        AtLeastN[
                            Union[
                                MatchSequenceElementOrMatchStarMatchType,
                                OneOf[MatchSequenceElementOrMatchStarMatchType],
                                AllOf[MatchSequenceElementOrMatchStarMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                MatchSequenceElementOrMatchStarMatchType,
                                OneOf[MatchSequenceElementOrMatchStarMatchType],
                                AllOf[MatchSequenceElementOrMatchStarMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[
                    Sequence[
                        Union[
                            cst.MatchSequenceElement,
                            cst.MatchStar,
                            OneOf[Union[cst.MatchSequenceElement, cst.MatchStar]],
                            AllOf[Union[cst.MatchSequenceElement, cst.MatchStar]],
                        ]
                    ]
                ],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        MatchSequenceElementOrMatchStarMatchType,
                        OneOf[MatchSequenceElementOrMatchStarMatchType],
                        AllOf[MatchSequenceElementOrMatchStarMatchType],
                        AtLeastN[
                            Union[
                                MatchSequenceElementOrMatchStarMatchType,
                                OneOf[MatchSequenceElementOrMatchStarMatchType],
                                AllOf[MatchSequenceElementOrMatchStarMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                MatchSequenceElementOrMatchStarMatchType,
                                OneOf[MatchSequenceElementOrMatchStarMatchType],
                                AllOf[MatchSequenceElementOrMatchStarMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[
                    Sequence[
                        Union[
                            cst.MatchSequenceElement,
                            cst.MatchStar,
                            OneOf[Union[cst.MatchSequenceElement, cst.MatchStar]],
                            AllOf[Union[cst.MatchSequenceElement, cst.MatchStar]],
                        ]
                    ]
                ],
            ]
        ],
    ] = DoNotCare()
    # Left parenthesis tokens around the tuple pattern.
    lpar: Union[
        Sequence[
            Union[
                LeftParenMatchType,
                DoNotCareSentinel,
                OneOf[LeftParenMatchType],
                AllOf[LeftParenMatchType],
                AtLeastN[
                    Union[
                        LeftParenMatchType,
                        DoNotCareSentinel,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        LeftParenMatchType,
                        DoNotCareSentinel,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.LeftParen]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        LeftParenMatchType,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                        AtLeastN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.LeftParen]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        LeftParenMatchType,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                        AtLeastN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.LeftParen]],
            ]
        ],
    ] = DoNotCare()
    # Right parenthesis tokens around the tuple pattern.
    rpar: Union[
        Sequence[
            Union[
                RightParenMatchType,
                DoNotCareSentinel,
                OneOf[RightParenMatchType],
                AllOf[RightParenMatchType],
                AtLeastN[
                    Union[
                        RightParenMatchType,
                        DoNotCareSentinel,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        RightParenMatchType,
                        DoNotCareSentinel,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.RightParen]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        RightParenMatchType,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                        AtLeastN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.RightParen]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        RightParenMatchType,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                        AtLeastN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.RightParen]],
            ]
        ],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class MatchValue(BaseMatcherNode):
    """Generated matcher for ``cst.MatchValue`` nodes (a value pattern)."""

    # The expression being compared against in the value pattern.
    value: Union[
        BaseExpressionMatchType,
        DoNotCareSentinel,
        OneOf[BaseExpressionMatchType],
        AllOf[BaseExpressionMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class MatrixMultiply(BaseBinaryOp, BaseMatcherNode):
    """Generated matcher for the ``cst.MatrixMultiply`` binary operator (``@``)."""

    whitespace_before: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    whitespace_after: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class MatrixMultiplyAssign(BaseAugOp, BaseMatcherNode):
    """Generated matcher for the ``cst.MatrixMultiplyAssign`` augmented op (``@=``)."""

    whitespace_before: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    whitespace_after: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Minus(BaseUnaryOp, BaseMatcherNode):
    """Generated matcher for the ``cst.Minus`` unary operator (``-``)."""

    whitespace_after: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
# Anything accepted wherever a statement-line node is matched: either matcher
# class, a metadata matcher, or a MatchIfTrue predicate over the cst union.
SimpleStatementLineOrBaseCompoundStatementMatchType = Union[
    "SimpleStatementLine",
    "BaseCompoundStatement",
    MetadataMatchType,
    MatchIfTrue[Union[cst.SimpleStatementLine, cst.BaseCompoundStatement]],
]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Module(BaseMatcherNode):
    """Generated matcher for ``cst.Module`` nodes.

    ``body`` matches the module's statement sequence; ``header``/``footer``
    match the leading/trailing empty-line sequences.  Each sequence field
    accepts per-element matchers (optionally wrapped in OneOf/AllOf/AtLeastN/
    AtMostN), a MatchIfTrue predicate over the raw sequence, or combinators
    over whole sequences.  Scalar fields (``encoding`` etc.) match the
    module's parse-time attributes.
    """

    # Top-level statements of the module.
    body: Union[
        Sequence[
            Union[
                SimpleStatementLineOrBaseCompoundStatementMatchType,
                DoNotCareSentinel,
                OneOf[SimpleStatementLineOrBaseCompoundStatementMatchType],
                AllOf[SimpleStatementLineOrBaseCompoundStatementMatchType],
                AtLeastN[
                    Union[
                        SimpleStatementLineOrBaseCompoundStatementMatchType,
                        DoNotCareSentinel,
                        OneOf[SimpleStatementLineOrBaseCompoundStatementMatchType],
                        AllOf[SimpleStatementLineOrBaseCompoundStatementMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        SimpleStatementLineOrBaseCompoundStatementMatchType,
                        DoNotCareSentinel,
                        OneOf[SimpleStatementLineOrBaseCompoundStatementMatchType],
                        AllOf[SimpleStatementLineOrBaseCompoundStatementMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[
            Sequence[
                Union[
                    cst.SimpleStatementLine,
                    cst.BaseCompoundStatement,
                    OneOf[Union[cst.SimpleStatementLine, cst.BaseCompoundStatement]],
                    AllOf[Union[cst.SimpleStatementLine, cst.BaseCompoundStatement]],
                ]
            ]
        ],
        OneOf[
            Union[
                Sequence[
                    Union[
                        SimpleStatementLineOrBaseCompoundStatementMatchType,
                        OneOf[SimpleStatementLineOrBaseCompoundStatementMatchType],
                        AllOf[SimpleStatementLineOrBaseCompoundStatementMatchType],
                        AtLeastN[
                            Union[
                                SimpleStatementLineOrBaseCompoundStatementMatchType,
                                OneOf[
                                    SimpleStatementLineOrBaseCompoundStatementMatchType
                                ],
                                AllOf[
                                    SimpleStatementLineOrBaseCompoundStatementMatchType
                                ],
                            ]
                        ],
                        AtMostN[
                            Union[
                                SimpleStatementLineOrBaseCompoundStatementMatchType,
                                OneOf[
                                    SimpleStatementLineOrBaseCompoundStatementMatchType
                                ],
                                AllOf[
                                    SimpleStatementLineOrBaseCompoundStatementMatchType
                                ],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[
                    Sequence[
                        Union[
                            cst.SimpleStatementLine,
                            cst.BaseCompoundStatement,
                            OneOf[
                                Union[
                                    cst.SimpleStatementLine, cst.BaseCompoundStatement
                                ]
                            ],
                            AllOf[
                                Union[
                                    cst.SimpleStatementLine, cst.BaseCompoundStatement
                                ]
                            ],
                        ]
                    ]
                ],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        SimpleStatementLineOrBaseCompoundStatementMatchType,
                        OneOf[SimpleStatementLineOrBaseCompoundStatementMatchType],
                        AllOf[SimpleStatementLineOrBaseCompoundStatementMatchType],
                        AtLeastN[
                            Union[
                                SimpleStatementLineOrBaseCompoundStatementMatchType,
                                OneOf[
                                    SimpleStatementLineOrBaseCompoundStatementMatchType
                                ],
                                AllOf[
                                    SimpleStatementLineOrBaseCompoundStatementMatchType
                                ],
                            ]
                        ],
                        AtMostN[
                            Union[
                                SimpleStatementLineOrBaseCompoundStatementMatchType,
                                OneOf[
                                    SimpleStatementLineOrBaseCompoundStatementMatchType
                                ],
                                AllOf[
                                    SimpleStatementLineOrBaseCompoundStatementMatchType
                                ],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[
                    Sequence[
                        Union[
                            cst.SimpleStatementLine,
                            cst.BaseCompoundStatement,
                            OneOf[
                                Union[
                                    cst.SimpleStatementLine, cst.BaseCompoundStatement
                                ]
                            ],
                            AllOf[
                                Union[
                                    cst.SimpleStatementLine, cst.BaseCompoundStatement
                                ]
                            ],
                        ]
                    ]
                ],
            ]
        ],
    ] = DoNotCare()
    # Empty lines before the first statement.
    header: Union[
        Sequence[
            Union[
                EmptyLineMatchType,
                DoNotCareSentinel,
                OneOf[EmptyLineMatchType],
                AllOf[EmptyLineMatchType],
                AtLeastN[
                    Union[
                        EmptyLineMatchType,
                        DoNotCareSentinel,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        EmptyLineMatchType,
                        DoNotCareSentinel,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.EmptyLine]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        EmptyLineMatchType,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                        AtLeastN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.EmptyLine]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        EmptyLineMatchType,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                        AtLeastN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.EmptyLine]],
            ]
        ],
    ] = DoNotCare()
    # Empty lines after the last statement.
    footer: Union[
        Sequence[
            Union[
                EmptyLineMatchType,
                DoNotCareSentinel,
                OneOf[EmptyLineMatchType],
                AllOf[EmptyLineMatchType],
                AtLeastN[
                    Union[
                        EmptyLineMatchType,
                        DoNotCareSentinel,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        EmptyLineMatchType,
                        DoNotCareSentinel,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.EmptyLine]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        EmptyLineMatchType,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                        AtLeastN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.EmptyLine]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        EmptyLineMatchType,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                        AtLeastN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.EmptyLine]],
            ]
        ],
    ] = DoNotCare()
    # Source encoding recorded at parse time.
    encoding: Union[
        strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType]
    ] = DoNotCare()
    default_indent: Union[
        strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType]
    ] = DoNotCare()
    default_newline: Union[
        strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType]
    ] = DoNotCare()
    has_trailing_newline: Union[
        boolMatchType, DoNotCareSentinel, OneOf[boolMatchType], AllOf[boolMatchType]
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Modulo(BaseBinaryOp, BaseMatcherNode):
    """Generated matcher for the ``cst.Modulo`` binary operator (``%``)."""

    whitespace_before: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    whitespace_after: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class ModuloAssign(BaseAugOp, BaseMatcherNode):
    """Generated matcher for the ``cst.ModuloAssign`` augmented op (``%=``)."""

    whitespace_before: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    whitespace_after: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Multiply(BaseBinaryOp, BaseMatcherNode):
    """Generated matcher for the ``cst.Multiply`` binary operator (``*``)."""

    whitespace_before: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    whitespace_after: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class MultiplyAssign(BaseAugOp, BaseMatcherNode):
    """Generated matcher for the ``cst.MultiplyAssign`` augmented op (``*=``)."""

    whitespace_before: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    whitespace_after: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Name(
    BaseAssignTargetExpression, BaseDelTargetExpression, BaseExpression, BaseMatcherNode
):
    """Generated matcher for ``cst.Name`` nodes (identifiers).

    ``value`` matches the identifier string; ``lpar``/``rpar`` match the
    surrounding parenthesis token sequences with per-element matchers,
    sequence predicates, or whole-sequence combinators.
    """

    # The identifier text.
    value: Union[
        strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType]
    ] = DoNotCare()
    # Left parenthesis tokens around the expression.
    lpar: Union[
        Sequence[
            Union[
                LeftParenMatchType,
                DoNotCareSentinel,
                OneOf[LeftParenMatchType],
                AllOf[LeftParenMatchType],
                AtLeastN[
                    Union[
                        LeftParenMatchType,
                        DoNotCareSentinel,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        LeftParenMatchType,
                        DoNotCareSentinel,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.LeftParen]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        LeftParenMatchType,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                        AtLeastN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.LeftParen]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        LeftParenMatchType,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                        AtLeastN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.LeftParen]],
            ]
        ],
    ] = DoNotCare()
    # Right parenthesis tokens around the expression.
    rpar: Union[
        Sequence[
            Union[
                RightParenMatchType,
                DoNotCareSentinel,
                OneOf[RightParenMatchType],
                AllOf[RightParenMatchType],
                AtLeastN[
                    Union[
                        RightParenMatchType,
                        DoNotCareSentinel,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        RightParenMatchType,
                        DoNotCareSentinel,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.RightParen]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        RightParenMatchType,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                        AtLeastN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.RightParen]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        RightParenMatchType,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                        AtLeastN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.RightParen]],
            ]
        ],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class NameItem(BaseMatcherNode):
    """Generated matcher for ``cst.NameItem`` nodes (a name in global/nonlocal lists)."""

    name: Union[
        NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType]
    ] = DoNotCare()
    # Optional trailing comma token.
    comma: Union[
        CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType]
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class NamedExpr(BaseExpression, BaseMatcherNode):
    """Generated matcher for ``cst.NamedExpr`` nodes (walrus ``target := value``).

    ``target`` and ``value`` match the two operand expressions; the
    ``whitespace_*_walrus`` fields match the whitespace around ``:=``;
    ``lpar``/``rpar`` match surrounding parenthesis token sequences.
    """

    # Left-hand side of ``:=``.
    target: Union[
        BaseExpressionMatchType,
        DoNotCareSentinel,
        OneOf[BaseExpressionMatchType],
        AllOf[BaseExpressionMatchType],
    ] = DoNotCare()
    # Right-hand side of ``:=``.
    value: Union[
        BaseExpressionMatchType,
        DoNotCareSentinel,
        OneOf[BaseExpressionMatchType],
        AllOf[BaseExpressionMatchType],
    ] = DoNotCare()
    # Left parenthesis tokens around the expression.
    lpar: Union[
        Sequence[
            Union[
                LeftParenMatchType,
                DoNotCareSentinel,
                OneOf[LeftParenMatchType],
                AllOf[LeftParenMatchType],
                AtLeastN[
                    Union[
                        LeftParenMatchType,
                        DoNotCareSentinel,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        LeftParenMatchType,
                        DoNotCareSentinel,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.LeftParen]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        LeftParenMatchType,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                        AtLeastN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.LeftParen]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        LeftParenMatchType,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                        AtLeastN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.LeftParen]],
            ]
        ],
    ] = DoNotCare()
    # Right parenthesis tokens around the expression.
    rpar: Union[
        Sequence[
            Union[
                RightParenMatchType,
                DoNotCareSentinel,
                OneOf[RightParenMatchType],
                AllOf[RightParenMatchType],
                AtLeastN[
                    Union[
                        RightParenMatchType,
                        DoNotCareSentinel,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        RightParenMatchType,
                        DoNotCareSentinel,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.RightParen]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        RightParenMatchType,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                        AtLeastN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.RightParen]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        RightParenMatchType,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                        AtLeastN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.RightParen]],
            ]
        ],
    ] = DoNotCare()
    whitespace_before_walrus: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    whitespace_after_walrus: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Newline(BaseMatcherNode):
    """Generated matcher for ``cst.Newline`` tokens.

    ``value`` may be None in the underlying CST, hence the Optional[str] union.
    """

    value: Union[
        Optional[str],
        MetadataMatchType,
        MatchIfTrue[Optional[str]],
        DoNotCareSentinel,
        OneOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]],
        AllOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Nonlocal(BaseSmallStatement, BaseMatcherNode):
    """Generated matcher for ``cst.Nonlocal`` statements.

    ``names`` matches the sequence of NameItem nodes with per-element
    matchers, a sequence predicate, or whole-sequence combinators.
    """

    # The declared names, as a sequence of NameItem matchers.
    names: Union[
        Sequence[
            Union[
                NameItemMatchType,
                DoNotCareSentinel,
                OneOf[NameItemMatchType],
                AllOf[NameItemMatchType],
                AtLeastN[
                    Union[
                        NameItemMatchType,
                        DoNotCareSentinel,
                        OneOf[NameItemMatchType],
                        AllOf[NameItemMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        NameItemMatchType,
                        DoNotCareSentinel,
                        OneOf[NameItemMatchType],
                        AllOf[NameItemMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.NameItem]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        NameItemMatchType,
                        OneOf[NameItemMatchType],
                        AllOf[NameItemMatchType],
                        AtLeastN[
                            Union[
                                NameItemMatchType,
                                OneOf[NameItemMatchType],
                                AllOf[NameItemMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                NameItemMatchType,
                                OneOf[NameItemMatchType],
                                AllOf[NameItemMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.NameItem]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        NameItemMatchType,
                        OneOf[NameItemMatchType],
                        AllOf[NameItemMatchType],
                        AtLeastN[
                            Union[
                                NameItemMatchType,
                                OneOf[NameItemMatchType],
                                AllOf[NameItemMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                NameItemMatchType,
                                OneOf[NameItemMatchType],
                                AllOf[NameItemMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.NameItem]],
            ]
        ],
    ] = DoNotCare()
    whitespace_after_nonlocal: Union[
        SimpleWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[SimpleWhitespaceMatchType],
        AllOf[SimpleWhitespaceMatchType],
    ] = DoNotCare()
    # Optional trailing semicolon token.
    semicolon: Union[
        SemicolonMatchType,
        DoNotCareSentinel,
        OneOf[SemicolonMatchType],
        AllOf[SemicolonMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Not(BaseUnaryOp, BaseMatcherNode):
    """Generated matcher for the ``cst.Not`` unary operator (``not``)."""

    whitespace_after: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class NotEqual(BaseCompOp, BaseMatcherNode):
    """Generated matcher for the ``cst.NotEqual`` comparison operator.

    Unlike most operators this one carries a ``value`` string (the operator's
    spelling — presumably ``!=`` vs the legacy ``<>``; confirm in libcst docs).
    """

    value: Union[
        strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType]
    ] = DoNotCare()
    whitespace_before: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    whitespace_after: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class NotIn(BaseCompOp, BaseMatcherNode):
    """Generated matcher for the ``cst.NotIn`` comparison operator (``not in``).

    ``whitespace_between`` matches the whitespace separating the two keywords.
    """

    whitespace_before: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    whitespace_between: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    whitespace_after: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Or(BaseBooleanOp, BaseMatcherNode):
    """Generated matcher for the ``cst.Or`` boolean operator (``or``)."""

    whitespace_before: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    whitespace_after: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Param(BaseMatcherNode):
    """Generated matcher for ``cst.Param`` nodes (a function parameter).

    ``annotation`` and ``default`` are None-capable in the underlying CST,
    hence their Optional-based unions.  ``star`` matches the string prefix
    (empty, ``*`` or ``**`` — presumably; confirm in libcst docs).
    """

    name: Union[
        NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType]
    ] = DoNotCare()
    # Optional type annotation node.
    annotation: Union[
        Optional["Annotation"],
        MetadataMatchType,
        MatchIfTrue[Optional[cst.Annotation]],
        DoNotCareSentinel,
        OneOf[
            Union[
                Optional["Annotation"],
                MetadataMatchType,
                MatchIfTrue[Optional[cst.Annotation]],
            ]
        ],
        AllOf[
            Union[
                Optional["Annotation"],
                MetadataMatchType,
                MatchIfTrue[Optional[cst.Annotation]],
            ]
        ],
    ] = DoNotCare()
    # ``=`` token between name and default.
    equal: Union[
        AssignEqualMatchType,
        DoNotCareSentinel,
        OneOf[AssignEqualMatchType],
        AllOf[AssignEqualMatchType],
    ] = DoNotCare()
    # Optional default-value expression.
    default: Union[
        Optional["BaseExpression"],
        MetadataMatchType,
        MatchIfTrue[Optional[cst.BaseExpression]],
        DoNotCareSentinel,
        OneOf[
            Union[
                Optional["BaseExpression"],
                MetadataMatchType,
                MatchIfTrue[Optional[cst.BaseExpression]],
            ]
        ],
        AllOf[
            Union[
                Optional["BaseExpression"],
                MetadataMatchType,
                MatchIfTrue[Optional[cst.BaseExpression]],
            ]
        ],
    ] = DoNotCare()
    # Optional trailing comma token.
    comma: Union[
        CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType]
    ] = DoNotCare()
    star: Union[
        strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType]
    ] = DoNotCare()
    whitespace_after_star: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    whitespace_after_param: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class ParamSlash(BaseMatcherNode):
    """Generated matcher for ``cst.ParamSlash`` nodes (the ``/`` marker in a signature)."""

    # Optional trailing comma token.
    comma: Union[
        CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType]
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class ParamStar(BaseMatcherNode):
    """Generated matcher for ``cst.ParamStar`` nodes (the bare ``*`` marker in a signature)."""

    # Optional trailing comma token.
    comma: Union[
        CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType]
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
# Anything accepted wherever a ``cst.Param`` is matched.
ParamMatchType = Union["Param", MetadataMatchType, MatchIfTrue[cst.Param]]
# Anything accepted wherever a ``cst.Param`` or ``cst.ParamStar`` is matched
# (e.g. the star_arg slot of a Parameters matcher).
ParamOrParamStarMatchType = Union[
    "Param",
    "ParamStar",
    MetadataMatchType,
    MatchIfTrue[Union[cst.Param, cst.ParamStar]],
]
# Anything accepted wherever a ``cst.ParamSlash`` is matched.
ParamSlashMatchType = Union[
    "ParamSlash", MetadataMatchType, MatchIfTrue[cst.ParamSlash]
]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Parameters(BaseMatcherNode):
    # Generated matcher for cst.Parameters. Sequence-valued fields additionally
    # accept AtLeastN/AtMostN wildcards inside the sequence; every field
    # defaults to DoNotCare() (unconstrained).
    """Matcher corresponding to ``cst.Parameters``; fields default to ``DoNotCare()``."""

    params: Union[
        Sequence[
            Union[
                ParamMatchType,
                DoNotCareSentinel,
                OneOf[ParamMatchType],
                AllOf[ParamMatchType],
                AtLeastN[
                    Union[
                        ParamMatchType,
                        DoNotCareSentinel,
                        OneOf[ParamMatchType],
                        AllOf[ParamMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        ParamMatchType,
                        DoNotCareSentinel,
                        OneOf[ParamMatchType],
                        AllOf[ParamMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.Param]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        ParamMatchType,
                        OneOf[ParamMatchType],
                        AllOf[ParamMatchType],
                        AtLeastN[
                            Union[
                                ParamMatchType,
                                OneOf[ParamMatchType],
                                AllOf[ParamMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                ParamMatchType,
                                OneOf[ParamMatchType],
                                AllOf[ParamMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.Param]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        ParamMatchType,
                        OneOf[ParamMatchType],
                        AllOf[ParamMatchType],
                        AtLeastN[
                            Union[
                                ParamMatchType,
                                OneOf[ParamMatchType],
                                AllOf[ParamMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                ParamMatchType,
                                OneOf[ParamMatchType],
                                AllOf[ParamMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.Param]],
            ]
        ],
    ] = DoNotCare()
    star_arg: Union[
        ParamOrParamStarMatchType,
        DoNotCareSentinel,
        OneOf[ParamOrParamStarMatchType],
        AllOf[ParamOrParamStarMatchType],
    ] = DoNotCare()
    kwonly_params: Union[
        Sequence[
            Union[
                ParamMatchType,
                DoNotCareSentinel,
                OneOf[ParamMatchType],
                AllOf[ParamMatchType],
                AtLeastN[
                    Union[
                        ParamMatchType,
                        DoNotCareSentinel,
                        OneOf[ParamMatchType],
                        AllOf[ParamMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        ParamMatchType,
                        DoNotCareSentinel,
                        OneOf[ParamMatchType],
                        AllOf[ParamMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.Param]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        ParamMatchType,
                        OneOf[ParamMatchType],
                        AllOf[ParamMatchType],
                        AtLeastN[
                            Union[
                                ParamMatchType,
                                OneOf[ParamMatchType],
                                AllOf[ParamMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                ParamMatchType,
                                OneOf[ParamMatchType],
                                AllOf[ParamMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.Param]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        ParamMatchType,
                        OneOf[ParamMatchType],
                        AllOf[ParamMatchType],
                        AtLeastN[
                            Union[
                                ParamMatchType,
                                OneOf[ParamMatchType],
                                AllOf[ParamMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                ParamMatchType,
                                OneOf[ParamMatchType],
                                AllOf[ParamMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.Param]],
            ]
        ],
    ] = DoNotCare()
    star_kwarg: Union[
        Optional["Param"],
        MetadataMatchType,
        MatchIfTrue[Optional[cst.Param]],
        DoNotCareSentinel,
        OneOf[
            Union[
                Optional["Param"], MetadataMatchType, MatchIfTrue[Optional[cst.Param]]
            ]
        ],
        AllOf[
            Union[
                Optional["Param"], MetadataMatchType, MatchIfTrue[Optional[cst.Param]]
            ]
        ],
    ] = DoNotCare()
    posonly_params: Union[
        Sequence[
            Union[
                ParamMatchType,
                DoNotCareSentinel,
                OneOf[ParamMatchType],
                AllOf[ParamMatchType],
                AtLeastN[
                    Union[
                        ParamMatchType,
                        DoNotCareSentinel,
                        OneOf[ParamMatchType],
                        AllOf[ParamMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        ParamMatchType,
                        DoNotCareSentinel,
                        OneOf[ParamMatchType],
                        AllOf[ParamMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.Param]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        ParamMatchType,
                        OneOf[ParamMatchType],
                        AllOf[ParamMatchType],
                        AtLeastN[
                            Union[
                                ParamMatchType,
                                OneOf[ParamMatchType],
                                AllOf[ParamMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                ParamMatchType,
                                OneOf[ParamMatchType],
                                AllOf[ParamMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.Param]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        ParamMatchType,
                        OneOf[ParamMatchType],
                        AllOf[ParamMatchType],
                        AtLeastN[
                            Union[
                                ParamMatchType,
                                OneOf[ParamMatchType],
                                AllOf[ParamMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                ParamMatchType,
                                OneOf[ParamMatchType],
                                AllOf[ParamMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.Param]],
            ]
        ],
    ] = DoNotCare()
    posonly_ind: Union[
        ParamSlashMatchType,
        DoNotCareSentinel,
        OneOf[ParamSlashMatchType],
        AllOf[ParamSlashMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class ParenthesizedWhitespace(BaseParenthesizableWhitespace, BaseMatcherNode):
    # Generated matcher for cst.ParenthesizedWhitespace. Sequence-valued fields
    # additionally accept AtLeastN/AtMostN wildcards; every field defaults to
    # DoNotCare() (unconstrained).
    """Matcher corresponding to ``cst.ParenthesizedWhitespace``; fields default to ``DoNotCare()``."""

    first_line: Union[
        TrailingWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[TrailingWhitespaceMatchType],
        AllOf[TrailingWhitespaceMatchType],
    ] = DoNotCare()
    empty_lines: Union[
        Sequence[
            Union[
                EmptyLineMatchType,
                DoNotCareSentinel,
                OneOf[EmptyLineMatchType],
                AllOf[EmptyLineMatchType],
                AtLeastN[
                    Union[
                        EmptyLineMatchType,
                        DoNotCareSentinel,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        EmptyLineMatchType,
                        DoNotCareSentinel,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.EmptyLine]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        EmptyLineMatchType,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                        AtLeastN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.EmptyLine]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        EmptyLineMatchType,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                        AtLeastN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.EmptyLine]],
            ]
        ],
    ] = DoNotCare()
    indent: Union[
        boolMatchType, DoNotCareSentinel, OneOf[boolMatchType], AllOf[boolMatchType]
    ] = DoNotCare()
    last_line: Union[
        SimpleWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[SimpleWhitespaceMatchType],
        AllOf[SimpleWhitespaceMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Pass(BaseSmallStatement, BaseMatcherNode):
    # Generated matcher for cst.Pass; fields default to DoNotCare() (unconstrained).
    """Matcher corresponding to ``cst.Pass``; fields default to ``DoNotCare()``."""

    semicolon: Union[
        SemicolonMatchType,
        DoNotCareSentinel,
        OneOf[SemicolonMatchType],
        AllOf[SemicolonMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Plus(BaseUnaryOp, BaseMatcherNode):
    # Generated matcher for the unary plus operator node cst.Plus.
    """Matcher corresponding to ``cst.Plus``; fields default to ``DoNotCare()``."""

    whitespace_after: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Power(BaseBinaryOp, BaseMatcherNode):
    # Generated matcher for the binary power operator node cst.Power.
    """Matcher corresponding to ``cst.Power``; fields default to ``DoNotCare()``."""

    whitespace_before: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    whitespace_after: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class PowerAssign(BaseAugOp, BaseMatcherNode):
    # Generated matcher for the augmented-assignment operator node cst.PowerAssign.
    """Matcher corresponding to ``cst.PowerAssign``; fields default to ``DoNotCare()``."""

    whitespace_before: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    whitespace_after: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Raise(BaseSmallStatement, BaseMatcherNode):
    # Generated matcher for cst.Raise. Optional-typed fields (exc, cause) also
    # accept None, mirroring the optional cst node attributes.
    """Matcher corresponding to ``cst.Raise``; fields default to ``DoNotCare()``."""

    exc: Union[
        Optional["BaseExpression"],
        MetadataMatchType,
        MatchIfTrue[Optional[cst.BaseExpression]],
        DoNotCareSentinel,
        OneOf[
            Union[
                Optional["BaseExpression"],
                MetadataMatchType,
                MatchIfTrue[Optional[cst.BaseExpression]],
            ]
        ],
        AllOf[
            Union[
                Optional["BaseExpression"],
                MetadataMatchType,
                MatchIfTrue[Optional[cst.BaseExpression]],
            ]
        ],
    ] = DoNotCare()
    cause: Union[
        Optional["From"],
        MetadataMatchType,
        MatchIfTrue[Optional[cst.From]],
        DoNotCareSentinel,
        OneOf[
            Union[Optional["From"], MetadataMatchType, MatchIfTrue[Optional[cst.From]]]
        ],
        AllOf[
            Union[Optional["From"], MetadataMatchType, MatchIfTrue[Optional[cst.From]]]
        ],
    ] = DoNotCare()
    whitespace_after_raise: Union[
        SimpleWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[SimpleWhitespaceMatchType],
        AllOf[SimpleWhitespaceMatchType],
    ] = DoNotCare()
    semicolon: Union[
        SemicolonMatchType,
        DoNotCareSentinel,
        OneOf[SemicolonMatchType],
        AllOf[SemicolonMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Return(BaseSmallStatement, BaseMatcherNode):
    # Generated matcher for cst.Return. The optional value field also accepts
    # None, mirroring `return` with no expression.
    """Matcher corresponding to ``cst.Return``; fields default to ``DoNotCare()``."""

    value: Union[
        Optional["BaseExpression"],
        MetadataMatchType,
        MatchIfTrue[Optional[cst.BaseExpression]],
        DoNotCareSentinel,
        OneOf[
            Union[
                Optional["BaseExpression"],
                MetadataMatchType,
                MatchIfTrue[Optional[cst.BaseExpression]],
            ]
        ],
        AllOf[
            Union[
                Optional["BaseExpression"],
                MetadataMatchType,
                MatchIfTrue[Optional[cst.BaseExpression]],
            ]
        ],
    ] = DoNotCare()
    whitespace_after_return: Union[
        SimpleWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[SimpleWhitespaceMatchType],
        AllOf[SimpleWhitespaceMatchType],
    ] = DoNotCare()
    semicolon: Union[
        SemicolonMatchType,
        DoNotCareSentinel,
        OneOf[SemicolonMatchType],
        AllOf[SemicolonMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class RightCurlyBrace(BaseMatcherNode):
    # Generated matcher for the closing-brace token node cst.RightCurlyBrace.
    """Matcher corresponding to ``cst.RightCurlyBrace``; fields default to ``DoNotCare()``."""

    whitespace_before: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class RightParen(BaseMatcherNode):
    # Generated matcher for the closing-parenthesis token node cst.RightParen.
    """Matcher corresponding to ``cst.RightParen``; fields default to ``DoNotCare()``."""

    whitespace_before: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class RightShift(BaseBinaryOp, BaseMatcherNode):
    # Generated matcher for the binary right-shift operator node cst.RightShift.
    """Matcher corresponding to ``cst.RightShift``; fields default to ``DoNotCare()``."""

    whitespace_before: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    whitespace_after: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class RightShiftAssign(BaseAugOp, BaseMatcherNode):
    # Generated matcher for the augmented-assignment operator node cst.RightShiftAssign.
    """Matcher corresponding to ``cst.RightShiftAssign``; fields default to ``DoNotCare()``."""

    whitespace_before: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    whitespace_after: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class RightSquareBracket(BaseMatcherNode):
    # Generated matcher for the closing-bracket token node cst.RightSquareBracket.
    """Matcher corresponding to ``cst.RightSquareBracket``; fields default to ``DoNotCare()``."""

    whitespace_before: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Semicolon(BaseMatcherNode):
    # Generated matcher for the semicolon token node cst.Semicolon.
    """Matcher corresponding to ``cst.Semicolon``; fields default to ``DoNotCare()``."""

    whitespace_before: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    whitespace_after: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Set(BaseExpression, BaseSet, BaseMatcherNode):
    # Generated matcher for cst.Set (set literal). Sequence-valued fields
    # (elements, lpar, rpar) also accept AtLeastN/AtMostN wildcards; every
    # field defaults to DoNotCare() (unconstrained).
    """Matcher corresponding to ``cst.Set``; fields default to ``DoNotCare()``."""

    elements: Union[
        Sequence[
            Union[
                BaseElementMatchType,
                DoNotCareSentinel,
                OneOf[BaseElementMatchType],
                AllOf[BaseElementMatchType],
                AtLeastN[
                    Union[
                        BaseElementMatchType,
                        DoNotCareSentinel,
                        OneOf[BaseElementMatchType],
                        AllOf[BaseElementMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        BaseElementMatchType,
                        DoNotCareSentinel,
                        OneOf[BaseElementMatchType],
                        AllOf[BaseElementMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.BaseElement]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        BaseElementMatchType,
                        OneOf[BaseElementMatchType],
                        AllOf[BaseElementMatchType],
                        AtLeastN[
                            Union[
                                BaseElementMatchType,
                                OneOf[BaseElementMatchType],
                                AllOf[BaseElementMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                BaseElementMatchType,
                                OneOf[BaseElementMatchType],
                                AllOf[BaseElementMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.BaseElement]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        BaseElementMatchType,
                        OneOf[BaseElementMatchType],
                        AllOf[BaseElementMatchType],
                        AtLeastN[
                            Union[
                                BaseElementMatchType,
                                OneOf[BaseElementMatchType],
                                AllOf[BaseElementMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                BaseElementMatchType,
                                OneOf[BaseElementMatchType],
                                AllOf[BaseElementMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.BaseElement]],
            ]
        ],
    ] = DoNotCare()
    lbrace: Union[
        LeftCurlyBraceMatchType,
        DoNotCareSentinel,
        OneOf[LeftCurlyBraceMatchType],
        AllOf[LeftCurlyBraceMatchType],
    ] = DoNotCare()
    rbrace: Union[
        RightCurlyBraceMatchType,
        DoNotCareSentinel,
        OneOf[RightCurlyBraceMatchType],
        AllOf[RightCurlyBraceMatchType],
    ] = DoNotCare()
    lpar: Union[
        Sequence[
            Union[
                LeftParenMatchType,
                DoNotCareSentinel,
                OneOf[LeftParenMatchType],
                AllOf[LeftParenMatchType],
                AtLeastN[
                    Union[
                        LeftParenMatchType,
                        DoNotCareSentinel,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        LeftParenMatchType,
                        DoNotCareSentinel,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.LeftParen]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        LeftParenMatchType,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                        AtLeastN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.LeftParen]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        LeftParenMatchType,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                        AtLeastN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.LeftParen]],
            ]
        ],
    ] = DoNotCare()
    rpar: Union[
        Sequence[
            Union[
                RightParenMatchType,
                DoNotCareSentinel,
                OneOf[RightParenMatchType],
                AllOf[RightParenMatchType],
                AtLeastN[
                    Union[
                        RightParenMatchType,
                        DoNotCareSentinel,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        RightParenMatchType,
                        DoNotCareSentinel,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.RightParen]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        RightParenMatchType,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                        AtLeastN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.RightParen]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        RightParenMatchType,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                        AtLeastN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.RightParen]],
            ]
        ],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class SetComp(BaseComp, BaseExpression, BaseSet, BaseSimpleComp, BaseMatcherNode):
    # Generated matcher for cst.SetComp (set comprehension). Sequence-valued
    # fields (lpar, rpar) also accept AtLeastN/AtMostN wildcards; every field
    # defaults to DoNotCare() (unconstrained).
    """Matcher corresponding to ``cst.SetComp``; fields default to ``DoNotCare()``."""

    elt: Union[
        BaseAssignTargetExpressionMatchType,
        DoNotCareSentinel,
        OneOf[BaseAssignTargetExpressionMatchType],
        AllOf[BaseAssignTargetExpressionMatchType],
    ] = DoNotCare()
    for_in: Union[
        CompForMatchType,
        DoNotCareSentinel,
        OneOf[CompForMatchType],
        AllOf[CompForMatchType],
    ] = DoNotCare()
    lbrace: Union[
        LeftCurlyBraceMatchType,
        DoNotCareSentinel,
        OneOf[LeftCurlyBraceMatchType],
        AllOf[LeftCurlyBraceMatchType],
    ] = DoNotCare()
    rbrace: Union[
        RightCurlyBraceMatchType,
        DoNotCareSentinel,
        OneOf[RightCurlyBraceMatchType],
        AllOf[RightCurlyBraceMatchType],
    ] = DoNotCare()
    lpar: Union[
        Sequence[
            Union[
                LeftParenMatchType,
                DoNotCareSentinel,
                OneOf[LeftParenMatchType],
                AllOf[LeftParenMatchType],
                AtLeastN[
                    Union[
                        LeftParenMatchType,
                        DoNotCareSentinel,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        LeftParenMatchType,
                        DoNotCareSentinel,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.LeftParen]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        LeftParenMatchType,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                        AtLeastN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.LeftParen]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        LeftParenMatchType,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                        AtLeastN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.LeftParen]],
            ]
        ],
    ] = DoNotCare()
    rpar: Union[
        Sequence[
            Union[
                RightParenMatchType,
                DoNotCareSentinel,
                OneOf[RightParenMatchType],
                AllOf[RightParenMatchType],
                AtLeastN[
                    Union[
                        RightParenMatchType,
                        DoNotCareSentinel,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        RightParenMatchType,
                        DoNotCareSentinel,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.RightParen]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        RightParenMatchType,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                        AtLeastN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.RightParen]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        RightParenMatchType,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                        AtLeastN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.RightParen]],
            ]
        ],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
# Generated type alias: accepts a BaseSmallStatement matcher, a metadata
# matcher, or a boolean predicate over cst.BaseSmallStatement.
BaseSmallStatementMatchType = Union[
    "BaseSmallStatement", MetadataMatchType, MatchIfTrue[cst.BaseSmallStatement]
]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class SimpleStatementLine(BaseStatement, BaseMatcherNode):
    # Generated matcher for cst.SimpleStatementLine. Sequence-valued fields
    # (body, leading_lines) also accept AtLeastN/AtMostN wildcards; every
    # field defaults to DoNotCare() (unconstrained).
    """Matcher corresponding to ``cst.SimpleStatementLine``; fields default to ``DoNotCare()``."""

    body: Union[
        Sequence[
            Union[
                BaseSmallStatementMatchType,
                DoNotCareSentinel,
                OneOf[BaseSmallStatementMatchType],
                AllOf[BaseSmallStatementMatchType],
                AtLeastN[
                    Union[
                        BaseSmallStatementMatchType,
                        DoNotCareSentinel,
                        OneOf[BaseSmallStatementMatchType],
                        AllOf[BaseSmallStatementMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        BaseSmallStatementMatchType,
                        DoNotCareSentinel,
                        OneOf[BaseSmallStatementMatchType],
                        AllOf[BaseSmallStatementMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.BaseSmallStatement]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        BaseSmallStatementMatchType,
                        OneOf[BaseSmallStatementMatchType],
                        AllOf[BaseSmallStatementMatchType],
                        AtLeastN[
                            Union[
                                BaseSmallStatementMatchType,
                                OneOf[BaseSmallStatementMatchType],
                                AllOf[BaseSmallStatementMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                BaseSmallStatementMatchType,
                                OneOf[BaseSmallStatementMatchType],
                                AllOf[BaseSmallStatementMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.BaseSmallStatement]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        BaseSmallStatementMatchType,
                        OneOf[BaseSmallStatementMatchType],
                        AllOf[BaseSmallStatementMatchType],
                        AtLeastN[
                            Union[
                                BaseSmallStatementMatchType,
                                OneOf[BaseSmallStatementMatchType],
                                AllOf[BaseSmallStatementMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                BaseSmallStatementMatchType,
                                OneOf[BaseSmallStatementMatchType],
                                AllOf[BaseSmallStatementMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.BaseSmallStatement]],
            ]
        ],
    ] = DoNotCare()
    leading_lines: Union[
        Sequence[
            Union[
                EmptyLineMatchType,
                DoNotCareSentinel,
                OneOf[EmptyLineMatchType],
                AllOf[EmptyLineMatchType],
                AtLeastN[
                    Union[
                        EmptyLineMatchType,
                        DoNotCareSentinel,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        EmptyLineMatchType,
                        DoNotCareSentinel,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.EmptyLine]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        EmptyLineMatchType,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                        AtLeastN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.EmptyLine]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        EmptyLineMatchType,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                        AtLeastN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.EmptyLine]],
            ]
        ],
    ] = DoNotCare()
    trailing_whitespace: Union[
        TrailingWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[TrailingWhitespaceMatchType],
        AllOf[TrailingWhitespaceMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class SimpleStatementSuite(BaseSuite, BaseMatcherNode):
    # Generated matcher for cst.SimpleStatementSuite. The sequence-valued body
    # field also accepts AtLeastN/AtMostN wildcards; every field defaults to
    # DoNotCare() (unconstrained).
    """Matcher corresponding to ``cst.SimpleStatementSuite``; fields default to ``DoNotCare()``."""

    body: Union[
        Sequence[
            Union[
                BaseSmallStatementMatchType,
                DoNotCareSentinel,
                OneOf[BaseSmallStatementMatchType],
                AllOf[BaseSmallStatementMatchType],
                AtLeastN[
                    Union[
                        BaseSmallStatementMatchType,
                        DoNotCareSentinel,
                        OneOf[BaseSmallStatementMatchType],
                        AllOf[BaseSmallStatementMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        BaseSmallStatementMatchType,
                        DoNotCareSentinel,
                        OneOf[BaseSmallStatementMatchType],
                        AllOf[BaseSmallStatementMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.BaseSmallStatement]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        BaseSmallStatementMatchType,
                        OneOf[BaseSmallStatementMatchType],
                        AllOf[BaseSmallStatementMatchType],
                        AtLeastN[
                            Union[
                                BaseSmallStatementMatchType,
                                OneOf[BaseSmallStatementMatchType],
                                AllOf[BaseSmallStatementMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                BaseSmallStatementMatchType,
                                OneOf[BaseSmallStatementMatchType],
                                AllOf[BaseSmallStatementMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.BaseSmallStatement]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        BaseSmallStatementMatchType,
                        OneOf[BaseSmallStatementMatchType],
                        AllOf[BaseSmallStatementMatchType],
                        AtLeastN[
                            Union[
                                BaseSmallStatementMatchType,
                                OneOf[BaseSmallStatementMatchType],
                                AllOf[BaseSmallStatementMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                BaseSmallStatementMatchType,
                                OneOf[BaseSmallStatementMatchType],
                                AllOf[BaseSmallStatementMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.BaseSmallStatement]],
            ]
        ],
    ] = DoNotCare()
    leading_whitespace: Union[
        SimpleWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[SimpleWhitespaceMatchType],
        AllOf[SimpleWhitespaceMatchType],
    ] = DoNotCare()
    trailing_whitespace: Union[
        TrailingWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[TrailingWhitespaceMatchType],
        AllOf[TrailingWhitespaceMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class SimpleString(BaseExpression, BaseString, BaseMatcherNode):
    # Generated matcher for cst.SimpleString. Sequence-valued fields (lpar,
    # rpar) also accept AtLeastN/AtMostN wildcards; every field defaults to
    # DoNotCare() (unconstrained).
    """Matcher corresponding to ``cst.SimpleString``; fields default to ``DoNotCare()``."""

    value: Union[
        strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType]
    ] = DoNotCare()
    lpar: Union[
        Sequence[
            Union[
                LeftParenMatchType,
                DoNotCareSentinel,
                OneOf[LeftParenMatchType],
                AllOf[LeftParenMatchType],
                AtLeastN[
                    Union[
                        LeftParenMatchType,
                        DoNotCareSentinel,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        LeftParenMatchType,
                        DoNotCareSentinel,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.LeftParen]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        LeftParenMatchType,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                        AtLeastN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.LeftParen]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        LeftParenMatchType,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                        AtLeastN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.LeftParen]],
            ]
        ],
    ] = DoNotCare()
    rpar: Union[
        Sequence[
            Union[
                RightParenMatchType,
                DoNotCareSentinel,
                OneOf[RightParenMatchType],
                AllOf[RightParenMatchType],
                AtLeastN[
                    Union[
                        RightParenMatchType,
                        DoNotCareSentinel,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        RightParenMatchType,
                        DoNotCareSentinel,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.RightParen]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        RightParenMatchType,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                        AtLeastN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.RightParen]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        RightParenMatchType,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                        AtLeastN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.RightParen]],
            ]
        ],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class SimpleWhitespace(BaseParenthesizableWhitespace, BaseMatcherNode):
    # Generated matcher for cst.SimpleWhitespace; fields default to DoNotCare().
    """Matcher corresponding to ``cst.SimpleWhitespace``; fields default to ``DoNotCare()``."""

    value: Union[
        strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType]
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Slice(BaseSlice, BaseMatcherNode):
    # Generated matcher for cst.Slice. Optional-typed fields (lower, upper,
    # step) also accept None, mirroring omitted slice bounds; every field
    # defaults to DoNotCare() (unconstrained).
    """Matcher corresponding to ``cst.Slice``; fields default to ``DoNotCare()``."""

    lower: Union[
        Optional["BaseExpression"],
        MetadataMatchType,
        MatchIfTrue[Optional[cst.BaseExpression]],
        DoNotCareSentinel,
        OneOf[
            Union[
                Optional["BaseExpression"],
                MetadataMatchType,
                MatchIfTrue[Optional[cst.BaseExpression]],
            ]
        ],
        AllOf[
            Union[
                Optional["BaseExpression"],
                MetadataMatchType,
                MatchIfTrue[Optional[cst.BaseExpression]],
            ]
        ],
    ] = DoNotCare()
    upper: Union[
        Optional["BaseExpression"],
        MetadataMatchType,
        MatchIfTrue[Optional[cst.BaseExpression]],
        DoNotCareSentinel,
        OneOf[
            Union[
                Optional["BaseExpression"],
                MetadataMatchType,
                MatchIfTrue[Optional[cst.BaseExpression]],
            ]
        ],
        AllOf[
            Union[
                Optional["BaseExpression"],
                MetadataMatchType,
                MatchIfTrue[Optional[cst.BaseExpression]],
            ]
        ],
    ] = DoNotCare()
    step: Union[
        Optional["BaseExpression"],
        MetadataMatchType,
        MatchIfTrue[Optional[cst.BaseExpression]],
        DoNotCareSentinel,
        OneOf[
            Union[
                Optional["BaseExpression"],
                MetadataMatchType,
                MatchIfTrue[Optional[cst.BaseExpression]],
            ]
        ],
        AllOf[
            Union[
                Optional["BaseExpression"],
                MetadataMatchType,
                MatchIfTrue[Optional[cst.BaseExpression]],
            ]
        ],
    ] = DoNotCare()
    first_colon: Union[
        ColonMatchType, DoNotCareSentinel, OneOf[ColonMatchType], AllOf[ColonMatchType]
    ] = DoNotCare()
    second_colon: Union[
        ColonMatchType, DoNotCareSentinel, OneOf[ColonMatchType], AllOf[ColonMatchType]
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class StarredDictElement(BaseDictElement, BaseMatcherNode):
    # Generated matcher for cst.StarredDictElement (``**expr`` inside a dict
    # literal); fields default to DoNotCare() (unconstrained).
    """Matcher corresponding to ``cst.StarredDictElement``; fields default to ``DoNotCare()``."""

    value: Union[
        BaseExpressionMatchType,
        DoNotCareSentinel,
        OneOf[BaseExpressionMatchType],
        AllOf[BaseExpressionMatchType],
    ] = DoNotCare()
    comma: Union[
        CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType]
    ] = DoNotCare()
    whitespace_before_value: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class StarredElement(BaseElement, BaseMatcherNode):
    # Generated matcher for cst.StarredElement (``*expr`` inside a sequence).
    # Sequence-valued fields (lpar, rpar) also accept AtLeastN/AtMostN
    # wildcards; every field defaults to DoNotCare() (unconstrained).
    """Matcher corresponding to ``cst.StarredElement``; fields default to ``DoNotCare()``."""

    value: Union[
        BaseExpressionMatchType,
        DoNotCareSentinel,
        OneOf[BaseExpressionMatchType],
        AllOf[BaseExpressionMatchType],
    ] = DoNotCare()
    comma: Union[
        CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType]
    ] = DoNotCare()
    lpar: Union[
        Sequence[
            Union[
                LeftParenMatchType,
                DoNotCareSentinel,
                OneOf[LeftParenMatchType],
                AllOf[LeftParenMatchType],
                AtLeastN[
                    Union[
                        LeftParenMatchType,
                        DoNotCareSentinel,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        LeftParenMatchType,
                        DoNotCareSentinel,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.LeftParen]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        LeftParenMatchType,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                        AtLeastN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.LeftParen]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        LeftParenMatchType,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                        AtLeastN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.LeftParen]],
            ]
        ],
    ] = DoNotCare()
    rpar: Union[
        Sequence[
            Union[
                RightParenMatchType,
                DoNotCareSentinel,
                OneOf[RightParenMatchType],
                AllOf[RightParenMatchType],
                AtLeastN[
                    Union[
                        RightParenMatchType,
                        DoNotCareSentinel,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        RightParenMatchType,
                        DoNotCareSentinel,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.RightParen]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        RightParenMatchType,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                        AtLeastN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.RightParen]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        RightParenMatchType,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                        AtLeastN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.RightParen]],
            ]
        ],
    ] = DoNotCare()
    whitespace_before_value: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
# Generated type alias: accepts a SubscriptElement matcher, a metadata
# matcher, or a boolean predicate over cst.SubscriptElement.
SubscriptElementMatchType = Union[
    "SubscriptElement", MetadataMatchType, MatchIfTrue[cst.SubscriptElement]
]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Subscript(
    BaseAssignTargetExpression, BaseDelTargetExpression, BaseExpression, BaseMatcherNode
):
    """Matcher for subscript expressions (``value[slice]``).

    Every field defaults to ``DoNotCare()``, so the corresponding attribute of
    the node being matched is unconstrained unless a matcher is supplied.
    The ``Union`` annotations are intentionally exhaustive; they are presumably
    introspected by the matcher engine — do not simplify them.
    """

    # The expression being subscripted.
    value: Union[
        BaseExpressionMatchType,
        DoNotCareSentinel,
        OneOf[BaseExpressionMatchType],
        AllOf[BaseExpressionMatchType],
    ] = DoNotCare()
    # Sequence matcher over the subscript's elements; supports AtLeastN/AtMostN
    # wildcards and OneOf/AllOf combinators over whole sequences.
    slice: Union[
        Sequence[
            Union[
                SubscriptElementMatchType,
                DoNotCareSentinel,
                OneOf[SubscriptElementMatchType],
                AllOf[SubscriptElementMatchType],
                AtLeastN[
                    Union[
                        SubscriptElementMatchType,
                        DoNotCareSentinel,
                        OneOf[SubscriptElementMatchType],
                        AllOf[SubscriptElementMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        SubscriptElementMatchType,
                        DoNotCareSentinel,
                        OneOf[SubscriptElementMatchType],
                        AllOf[SubscriptElementMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.SubscriptElement]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        SubscriptElementMatchType,
                        OneOf[SubscriptElementMatchType],
                        AllOf[SubscriptElementMatchType],
                        AtLeastN[
                            Union[
                                SubscriptElementMatchType,
                                OneOf[SubscriptElementMatchType],
                                AllOf[SubscriptElementMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                SubscriptElementMatchType,
                                OneOf[SubscriptElementMatchType],
                                AllOf[SubscriptElementMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.SubscriptElement]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        SubscriptElementMatchType,
                        OneOf[SubscriptElementMatchType],
                        AllOf[SubscriptElementMatchType],
                        AtLeastN[
                            Union[
                                SubscriptElementMatchType,
                                OneOf[SubscriptElementMatchType],
                                AllOf[SubscriptElementMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                SubscriptElementMatchType,
                                OneOf[SubscriptElementMatchType],
                                AllOf[SubscriptElementMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.SubscriptElement]],
            ]
        ],
    ] = DoNotCare()
    # Bracket tokens around the slice.
    lbracket: Union[
        LeftSquareBracketMatchType,
        DoNotCareSentinel,
        OneOf[LeftSquareBracketMatchType],
        AllOf[LeftSquareBracketMatchType],
    ] = DoNotCare()
    rbracket: Union[
        RightSquareBracketMatchType,
        DoNotCareSentinel,
        OneOf[RightSquareBracketMatchType],
        AllOf[RightSquareBracketMatchType],
    ] = DoNotCare()
    # Optional surrounding parentheses of the whole expression.
    lpar: Union[
        Sequence[
            Union[
                LeftParenMatchType,
                DoNotCareSentinel,
                OneOf[LeftParenMatchType],
                AllOf[LeftParenMatchType],
                AtLeastN[
                    Union[
                        LeftParenMatchType,
                        DoNotCareSentinel,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        LeftParenMatchType,
                        DoNotCareSentinel,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.LeftParen]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        LeftParenMatchType,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                        AtLeastN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.LeftParen]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        LeftParenMatchType,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                        AtLeastN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.LeftParen]],
            ]
        ],
    ] = DoNotCare()
    rpar: Union[
        Sequence[
            Union[
                RightParenMatchType,
                DoNotCareSentinel,
                OneOf[RightParenMatchType],
                AllOf[RightParenMatchType],
                AtLeastN[
                    Union[
                        RightParenMatchType,
                        DoNotCareSentinel,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        RightParenMatchType,
                        DoNotCareSentinel,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.RightParen]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        RightParenMatchType,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                        AtLeastN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.RightParen]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        RightParenMatchType,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                        AtLeastN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.RightParen]],
            ]
        ],
    ] = DoNotCare()
    # Whitespace between ``value`` and ``[``.
    whitespace_after_value: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    # Metadata constraints applied to the matched node.
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
BaseSliceMatchType = Union["BaseSlice", MetadataMatchType, MatchIfTrue[cst.BaseSlice]]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class SubscriptElement(BaseMatcherNode):
    """Matcher for a single element inside a subscript's brackets.

    Every field defaults to ``DoNotCare()``, so the corresponding attribute of
    the node being matched is unconstrained unless a matcher is supplied.
    """

    # The slice expression carried by this element.
    slice: Union[
        BaseSliceMatchType,
        DoNotCareSentinel,
        OneOf[BaseSliceMatchType],
        AllOf[BaseSliceMatchType],
    ] = DoNotCare()
    # Trailing comma separating this element from the next one.
    comma: Union[
        CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType]
    ] = DoNotCare()
    # Metadata constraints applied to the matched node.
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Subtract(BaseBinaryOp, BaseMatcherNode):
    """Matcher for the binary subtraction operator token.

    Only the whitespace around the operator and node metadata can be
    constrained; all fields default to ``DoNotCare()``.
    """

    whitespace_before: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    whitespace_after: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    # Metadata constraints applied to the matched node.
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class SubtractAssign(BaseAugOp, BaseMatcherNode):
    """Matcher for the augmented-assignment operator ``-=``.

    Only the whitespace around the operator and node metadata can be
    constrained; all fields default to ``DoNotCare()``.
    """

    whitespace_before: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    whitespace_after: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    # Metadata constraints applied to the matched node.
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class TrailingWhitespace(BaseMatcherNode):
    """Matcher for the whitespace/comment/newline run that ends a line.

    Every field defaults to ``DoNotCare()``, so the corresponding attribute of
    the node being matched is unconstrained unless a matcher is supplied.
    """

    # Whitespace before any trailing comment.
    whitespace: Union[
        SimpleWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[SimpleWhitespaceMatchType],
        AllOf[SimpleWhitespaceMatchType],
    ] = DoNotCare()
    # Optional trailing comment; ``None`` (no comment) is matchable via the
    # Optional forms.
    comment: Union[
        Optional["Comment"],
        MetadataMatchType,
        MatchIfTrue[Optional[cst.Comment]],
        DoNotCareSentinel,
        OneOf[
            Union[
                Optional["Comment"],
                MetadataMatchType,
                MatchIfTrue[Optional[cst.Comment]],
            ]
        ],
        AllOf[
            Union[
                Optional["Comment"],
                MetadataMatchType,
                MatchIfTrue[Optional[cst.Comment]],
            ]
        ],
    ] = DoNotCare()
    # The newline token terminating the line.
    newline: Union[
        NewlineMatchType,
        DoNotCareSentinel,
        OneOf[NewlineMatchType],
        AllOf[NewlineMatchType],
    ] = DoNotCare()
    # Metadata constraints applied to the matched node.
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
# Anything accepted where an ExceptHandler matcher is expected: a concrete
# matcher, a metadata matcher, or a predicate over ``cst.ExceptHandler``.
ExceptHandlerMatchType = Union[
    "ExceptHandler", MetadataMatchType, MatchIfTrue[cst.ExceptHandler]
]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Try(BaseCompoundStatement, BaseStatement, BaseMatcherNode):
    """Matcher for ``try`` statements (plain ``except`` handlers).

    Every field defaults to ``DoNotCare()``, so the corresponding attribute of
    the node being matched is unconstrained unless a matcher is supplied.
    """

    # The ``try`` block's suite.
    body: Union[
        BaseSuiteMatchType,
        DoNotCareSentinel,
        OneOf[BaseSuiteMatchType],
        AllOf[BaseSuiteMatchType],
    ] = DoNotCare()
    # Sequence matcher over the ``except`` handlers; supports AtLeastN/AtMostN
    # wildcards and OneOf/AllOf combinators over whole sequences.
    handlers: Union[
        Sequence[
            Union[
                ExceptHandlerMatchType,
                DoNotCareSentinel,
                OneOf[ExceptHandlerMatchType],
                AllOf[ExceptHandlerMatchType],
                AtLeastN[
                    Union[
                        ExceptHandlerMatchType,
                        DoNotCareSentinel,
                        OneOf[ExceptHandlerMatchType],
                        AllOf[ExceptHandlerMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        ExceptHandlerMatchType,
                        DoNotCareSentinel,
                        OneOf[ExceptHandlerMatchType],
                        AllOf[ExceptHandlerMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.ExceptHandler]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        ExceptHandlerMatchType,
                        OneOf[ExceptHandlerMatchType],
                        AllOf[ExceptHandlerMatchType],
                        AtLeastN[
                            Union[
                                ExceptHandlerMatchType,
                                OneOf[ExceptHandlerMatchType],
                                AllOf[ExceptHandlerMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                ExceptHandlerMatchType,
                                OneOf[ExceptHandlerMatchType],
                                AllOf[ExceptHandlerMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.ExceptHandler]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        ExceptHandlerMatchType,
                        OneOf[ExceptHandlerMatchType],
                        AllOf[ExceptHandlerMatchType],
                        AtLeastN[
                            Union[
                                ExceptHandlerMatchType,
                                OneOf[ExceptHandlerMatchType],
                                AllOf[ExceptHandlerMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                ExceptHandlerMatchType,
                                OneOf[ExceptHandlerMatchType],
                                AllOf[ExceptHandlerMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.ExceptHandler]],
            ]
        ],
    ] = DoNotCare()
    # Optional ``else`` clause.
    orelse: Union[
        Optional["Else"],
        MetadataMatchType,
        MatchIfTrue[Optional[cst.Else]],
        DoNotCareSentinel,
        OneOf[
            Union[Optional["Else"], MetadataMatchType, MatchIfTrue[Optional[cst.Else]]]
        ],
        AllOf[
            Union[Optional["Else"], MetadataMatchType, MatchIfTrue[Optional[cst.Else]]]
        ],
    ] = DoNotCare()
    # Optional ``finally`` clause.
    finalbody: Union[
        Optional["Finally"],
        MetadataMatchType,
        MatchIfTrue[Optional[cst.Finally]],
        DoNotCareSentinel,
        OneOf[
            Union[
                Optional["Finally"],
                MetadataMatchType,
                MatchIfTrue[Optional[cst.Finally]],
            ]
        ],
        AllOf[
            Union[
                Optional["Finally"],
                MetadataMatchType,
                MatchIfTrue[Optional[cst.Finally]],
            ]
        ],
    ] = DoNotCare()
    # Empty lines preceding the statement.
    leading_lines: Union[
        Sequence[
            Union[
                EmptyLineMatchType,
                DoNotCareSentinel,
                OneOf[EmptyLineMatchType],
                AllOf[EmptyLineMatchType],
                AtLeastN[
                    Union[
                        EmptyLineMatchType,
                        DoNotCareSentinel,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        EmptyLineMatchType,
                        DoNotCareSentinel,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.EmptyLine]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        EmptyLineMatchType,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                        AtLeastN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.EmptyLine]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        EmptyLineMatchType,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                        AtLeastN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.EmptyLine]],
            ]
        ],
    ] = DoNotCare()
    # Whitespace between ``try`` and ``:``.
    whitespace_before_colon: Union[
        SimpleWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[SimpleWhitespaceMatchType],
        AllOf[SimpleWhitespaceMatchType],
    ] = DoNotCare()
    # Metadata constraints applied to the matched node.
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
# Anything accepted where an ExceptStarHandler matcher is expected: a concrete
# matcher, a metadata matcher, or a predicate over ``cst.ExceptStarHandler``.
ExceptStarHandlerMatchType = Union[
    "ExceptStarHandler", MetadataMatchType, MatchIfTrue[cst.ExceptStarHandler]
]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class TryStar(BaseCompoundStatement, BaseStatement, BaseMatcherNode):
    """Matcher for ``try`` statements with ``except*`` (exception-group) handlers.

    Structurally identical to :class:`Try` except that ``handlers`` matches
    ``cst.ExceptStarHandler`` nodes. Every field defaults to ``DoNotCare()``.
    """

    # The ``try`` block's suite.
    body: Union[
        BaseSuiteMatchType,
        DoNotCareSentinel,
        OneOf[BaseSuiteMatchType],
        AllOf[BaseSuiteMatchType],
    ] = DoNotCare()
    # Sequence matcher over the ``except*`` handlers.
    handlers: Union[
        Sequence[
            Union[
                ExceptStarHandlerMatchType,
                DoNotCareSentinel,
                OneOf[ExceptStarHandlerMatchType],
                AllOf[ExceptStarHandlerMatchType],
                AtLeastN[
                    Union[
                        ExceptStarHandlerMatchType,
                        DoNotCareSentinel,
                        OneOf[ExceptStarHandlerMatchType],
                        AllOf[ExceptStarHandlerMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        ExceptStarHandlerMatchType,
                        DoNotCareSentinel,
                        OneOf[ExceptStarHandlerMatchType],
                        AllOf[ExceptStarHandlerMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.ExceptStarHandler]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        ExceptStarHandlerMatchType,
                        OneOf[ExceptStarHandlerMatchType],
                        AllOf[ExceptStarHandlerMatchType],
                        AtLeastN[
                            Union[
                                ExceptStarHandlerMatchType,
                                OneOf[ExceptStarHandlerMatchType],
                                AllOf[ExceptStarHandlerMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                ExceptStarHandlerMatchType,
                                OneOf[ExceptStarHandlerMatchType],
                                AllOf[ExceptStarHandlerMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.ExceptStarHandler]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        ExceptStarHandlerMatchType,
                        OneOf[ExceptStarHandlerMatchType],
                        AllOf[ExceptStarHandlerMatchType],
                        AtLeastN[
                            Union[
                                ExceptStarHandlerMatchType,
                                OneOf[ExceptStarHandlerMatchType],
                                AllOf[ExceptStarHandlerMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                ExceptStarHandlerMatchType,
                                OneOf[ExceptStarHandlerMatchType],
                                AllOf[ExceptStarHandlerMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.ExceptStarHandler]],
            ]
        ],
    ] = DoNotCare()
    # Optional ``else`` clause.
    orelse: Union[
        Optional["Else"],
        MetadataMatchType,
        MatchIfTrue[Optional[cst.Else]],
        DoNotCareSentinel,
        OneOf[
            Union[Optional["Else"], MetadataMatchType, MatchIfTrue[Optional[cst.Else]]]
        ],
        AllOf[
            Union[Optional["Else"], MetadataMatchType, MatchIfTrue[Optional[cst.Else]]]
        ],
    ] = DoNotCare()
    # Optional ``finally`` clause.
    finalbody: Union[
        Optional["Finally"],
        MetadataMatchType,
        MatchIfTrue[Optional[cst.Finally]],
        DoNotCareSentinel,
        OneOf[
            Union[
                Optional["Finally"],
                MetadataMatchType,
                MatchIfTrue[Optional[cst.Finally]],
            ]
        ],
        AllOf[
            Union[
                Optional["Finally"],
                MetadataMatchType,
                MatchIfTrue[Optional[cst.Finally]],
            ]
        ],
    ] = DoNotCare()
    # Empty lines preceding the statement.
    leading_lines: Union[
        Sequence[
            Union[
                EmptyLineMatchType,
                DoNotCareSentinel,
                OneOf[EmptyLineMatchType],
                AllOf[EmptyLineMatchType],
                AtLeastN[
                    Union[
                        EmptyLineMatchType,
                        DoNotCareSentinel,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        EmptyLineMatchType,
                        DoNotCareSentinel,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.EmptyLine]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        EmptyLineMatchType,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                        AtLeastN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.EmptyLine]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        EmptyLineMatchType,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                        AtLeastN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.EmptyLine]],
            ]
        ],
    ] = DoNotCare()
    # Whitespace between ``try`` and ``:``.
    whitespace_before_colon: Union[
        SimpleWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[SimpleWhitespaceMatchType],
        AllOf[SimpleWhitespaceMatchType],
    ] = DoNotCare()
    # Metadata constraints applied to the matched node.
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Tuple(
    BaseAssignTargetExpression, BaseDelTargetExpression, BaseExpression, BaseMatcherNode
):
    """Matcher for tuple expressions.

    Every field defaults to ``DoNotCare()``, so the corresponding attribute of
    the node being matched is unconstrained unless a matcher is supplied.
    """

    # Sequence matcher over the tuple's elements; supports AtLeastN/AtMostN
    # wildcards and OneOf/AllOf combinators over whole sequences.
    elements: Union[
        Sequence[
            Union[
                BaseElementMatchType,
                DoNotCareSentinel,
                OneOf[BaseElementMatchType],
                AllOf[BaseElementMatchType],
                AtLeastN[
                    Union[
                        BaseElementMatchType,
                        DoNotCareSentinel,
                        OneOf[BaseElementMatchType],
                        AllOf[BaseElementMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        BaseElementMatchType,
                        DoNotCareSentinel,
                        OneOf[BaseElementMatchType],
                        AllOf[BaseElementMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.BaseElement]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        BaseElementMatchType,
                        OneOf[BaseElementMatchType],
                        AllOf[BaseElementMatchType],
                        AtLeastN[
                            Union[
                                BaseElementMatchType,
                                OneOf[BaseElementMatchType],
                                AllOf[BaseElementMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                BaseElementMatchType,
                                OneOf[BaseElementMatchType],
                                AllOf[BaseElementMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.BaseElement]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        BaseElementMatchType,
                        OneOf[BaseElementMatchType],
                        AllOf[BaseElementMatchType],
                        AtLeastN[
                            Union[
                                BaseElementMatchType,
                                OneOf[BaseElementMatchType],
                                AllOf[BaseElementMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                BaseElementMatchType,
                                OneOf[BaseElementMatchType],
                                AllOf[BaseElementMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.BaseElement]],
            ]
        ],
    ] = DoNotCare()
    # Optional surrounding parentheses of the whole expression.
    lpar: Union[
        Sequence[
            Union[
                LeftParenMatchType,
                DoNotCareSentinel,
                OneOf[LeftParenMatchType],
                AllOf[LeftParenMatchType],
                AtLeastN[
                    Union[
                        LeftParenMatchType,
                        DoNotCareSentinel,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        LeftParenMatchType,
                        DoNotCareSentinel,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.LeftParen]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        LeftParenMatchType,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                        AtLeastN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.LeftParen]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        LeftParenMatchType,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                        AtLeastN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.LeftParen]],
            ]
        ],
    ] = DoNotCare()
    rpar: Union[
        Sequence[
            Union[
                RightParenMatchType,
                DoNotCareSentinel,
                OneOf[RightParenMatchType],
                AllOf[RightParenMatchType],
                AtLeastN[
                    Union[
                        RightParenMatchType,
                        DoNotCareSentinel,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        RightParenMatchType,
                        DoNotCareSentinel,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.RightParen]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        RightParenMatchType,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                        AtLeastN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.RightParen]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        RightParenMatchType,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                        AtLeastN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.RightParen]],
            ]
        ],
    ] = DoNotCare()
    # Metadata constraints applied to the matched node.
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
# Anything accepted where a BaseUnaryOp matcher is expected: a concrete
# matcher, a metadata matcher, or a predicate over ``cst.BaseUnaryOp``.
BaseUnaryOpMatchType = Union[
    "BaseUnaryOp", MetadataMatchType, MatchIfTrue[cst.BaseUnaryOp]
]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class UnaryOperation(BaseExpression, BaseMatcherNode):
    """Matcher for unary operations (e.g. ``-x``, ``not x``, ``~x``).

    Every field defaults to ``DoNotCare()``, so the corresponding attribute of
    the node being matched is unconstrained unless a matcher is supplied.
    """

    # The unary operator.
    operator: Union[
        BaseUnaryOpMatchType,
        DoNotCareSentinel,
        OneOf[BaseUnaryOpMatchType],
        AllOf[BaseUnaryOpMatchType],
    ] = DoNotCare()
    # The operand expression.
    expression: Union[
        BaseExpressionMatchType,
        DoNotCareSentinel,
        OneOf[BaseExpressionMatchType],
        AllOf[BaseExpressionMatchType],
    ] = DoNotCare()
    # Optional surrounding parentheses of the whole expression.
    lpar: Union[
        Sequence[
            Union[
                LeftParenMatchType,
                DoNotCareSentinel,
                OneOf[LeftParenMatchType],
                AllOf[LeftParenMatchType],
                AtLeastN[
                    Union[
                        LeftParenMatchType,
                        DoNotCareSentinel,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        LeftParenMatchType,
                        DoNotCareSentinel,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.LeftParen]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        LeftParenMatchType,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                        AtLeastN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.LeftParen]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        LeftParenMatchType,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                        AtLeastN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.LeftParen]],
            ]
        ],
    ] = DoNotCare()
    rpar: Union[
        Sequence[
            Union[
                RightParenMatchType,
                DoNotCareSentinel,
                OneOf[RightParenMatchType],
                AllOf[RightParenMatchType],
                AtLeastN[
                    Union[
                        RightParenMatchType,
                        DoNotCareSentinel,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        RightParenMatchType,
                        DoNotCareSentinel,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.RightParen]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        RightParenMatchType,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                        AtLeastN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.RightParen]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        RightParenMatchType,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                        AtLeastN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.RightParen]],
            ]
        ],
    ] = DoNotCare()
    # Metadata constraints applied to the matched node.
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class While(BaseCompoundStatement, BaseStatement, BaseMatcherNode):
    """Matcher for ``while`` statements.

    Every field defaults to ``DoNotCare()``, so the corresponding attribute of
    the node being matched is unconstrained unless a matcher is supplied.
    """

    # The loop condition.
    test: Union[
        BaseExpressionMatchType,
        DoNotCareSentinel,
        OneOf[BaseExpressionMatchType],
        AllOf[BaseExpressionMatchType],
    ] = DoNotCare()
    # The loop body suite.
    body: Union[
        BaseSuiteMatchType,
        DoNotCareSentinel,
        OneOf[BaseSuiteMatchType],
        AllOf[BaseSuiteMatchType],
    ] = DoNotCare()
    # Optional ``else`` clause.
    orelse: Union[
        Optional["Else"],
        MetadataMatchType,
        MatchIfTrue[Optional[cst.Else]],
        DoNotCareSentinel,
        OneOf[
            Union[Optional["Else"], MetadataMatchType, MatchIfTrue[Optional[cst.Else]]]
        ],
        AllOf[
            Union[Optional["Else"], MetadataMatchType, MatchIfTrue[Optional[cst.Else]]]
        ],
    ] = DoNotCare()
    # Empty lines preceding the statement.
    leading_lines: Union[
        Sequence[
            Union[
                EmptyLineMatchType,
                DoNotCareSentinel,
                OneOf[EmptyLineMatchType],
                AllOf[EmptyLineMatchType],
                AtLeastN[
                    Union[
                        EmptyLineMatchType,
                        DoNotCareSentinel,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        EmptyLineMatchType,
                        DoNotCareSentinel,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.EmptyLine]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        EmptyLineMatchType,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                        AtLeastN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.EmptyLine]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        EmptyLineMatchType,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                        AtLeastN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.EmptyLine]],
            ]
        ],
    ] = DoNotCare()
    # Whitespace around the header tokens.
    whitespace_after_while: Union[
        SimpleWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[SimpleWhitespaceMatchType],
        AllOf[SimpleWhitespaceMatchType],
    ] = DoNotCare()
    whitespace_before_colon: Union[
        SimpleWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[SimpleWhitespaceMatchType],
        AllOf[SimpleWhitespaceMatchType],
    ] = DoNotCare()
    # Metadata constraints applied to the matched node.
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
WithItemMatchType = Union["WithItem", MetadataMatchType, MatchIfTrue[cst.WithItem]]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class With(BaseCompoundStatement, BaseStatement, BaseMatcherNode):
    """Matcher for ``with`` statements.

    Every field defaults to ``DoNotCare()``, so the corresponding attribute of
    the node being matched is unconstrained unless a matcher is supplied.
    Note ``lpar``/``rpar`` here are single-paren matchers, not sequences —
    unlike the expression matchers in this module.
    """

    # Sequence matcher over the context-manager items.
    items: Union[
        Sequence[
            Union[
                WithItemMatchType,
                DoNotCareSentinel,
                OneOf[WithItemMatchType],
                AllOf[WithItemMatchType],
                AtLeastN[
                    Union[
                        WithItemMatchType,
                        DoNotCareSentinel,
                        OneOf[WithItemMatchType],
                        AllOf[WithItemMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        WithItemMatchType,
                        DoNotCareSentinel,
                        OneOf[WithItemMatchType],
                        AllOf[WithItemMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.WithItem]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        WithItemMatchType,
                        OneOf[WithItemMatchType],
                        AllOf[WithItemMatchType],
                        AtLeastN[
                            Union[
                                WithItemMatchType,
                                OneOf[WithItemMatchType],
                                AllOf[WithItemMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                WithItemMatchType,
                                OneOf[WithItemMatchType],
                                AllOf[WithItemMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.WithItem]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        WithItemMatchType,
                        OneOf[WithItemMatchType],
                        AllOf[WithItemMatchType],
                        AtLeastN[
                            Union[
                                WithItemMatchType,
                                OneOf[WithItemMatchType],
                                AllOf[WithItemMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                WithItemMatchType,
                                OneOf[WithItemMatchType],
                                AllOf[WithItemMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.WithItem]],
            ]
        ],
    ] = DoNotCare()
    # The ``with`` block's suite.
    body: Union[
        BaseSuiteMatchType,
        DoNotCareSentinel,
        OneOf[BaseSuiteMatchType],
        AllOf[BaseSuiteMatchType],
    ] = DoNotCare()
    # Optional ``async`` keyword.
    asynchronous: Union[
        Optional["Asynchronous"],
        MetadataMatchType,
        MatchIfTrue[Optional[cst.Asynchronous]],
        DoNotCareSentinel,
        OneOf[
            Union[
                Optional["Asynchronous"],
                MetadataMatchType,
                MatchIfTrue[Optional[cst.Asynchronous]],
            ]
        ],
        AllOf[
            Union[
                Optional["Asynchronous"],
                MetadataMatchType,
                MatchIfTrue[Optional[cst.Asynchronous]],
            ]
        ],
    ] = DoNotCare()
    # Empty lines preceding the statement.
    leading_lines: Union[
        Sequence[
            Union[
                EmptyLineMatchType,
                DoNotCareSentinel,
                OneOf[EmptyLineMatchType],
                AllOf[EmptyLineMatchType],
                AtLeastN[
                    Union[
                        EmptyLineMatchType,
                        DoNotCareSentinel,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        EmptyLineMatchType,
                        DoNotCareSentinel,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.EmptyLine]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        EmptyLineMatchType,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                        AtLeastN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.EmptyLine]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        EmptyLineMatchType,
                        OneOf[EmptyLineMatchType],
                        AllOf[EmptyLineMatchType],
                        AtLeastN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                EmptyLineMatchType,
                                OneOf[EmptyLineMatchType],
                                AllOf[EmptyLineMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.EmptyLine]],
            ]
        ],
    ] = DoNotCare()
    # Optional single parentheses around the items (single matchers, not
    # sequences).
    lpar: Union[
        LeftParenMatchType,
        DoNotCareSentinel,
        OneOf[LeftParenMatchType],
        AllOf[LeftParenMatchType],
    ] = DoNotCare()
    rpar: Union[
        RightParenMatchType,
        DoNotCareSentinel,
        OneOf[RightParenMatchType],
        AllOf[RightParenMatchType],
    ] = DoNotCare()
    # Whitespace around the header tokens.
    whitespace_after_with: Union[
        SimpleWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[SimpleWhitespaceMatchType],
        AllOf[SimpleWhitespaceMatchType],
    ] = DoNotCare()
    whitespace_before_colon: Union[
        SimpleWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[SimpleWhitespaceMatchType],
        AllOf[SimpleWhitespaceMatchType],
    ] = DoNotCare()
    # Metadata constraints applied to the matched node.
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class WithItem(BaseMatcherNode):
    """Matcher for a single context-manager item of a ``with`` statement.

    Every field defaults to ``DoNotCare()``, so the corresponding attribute of
    the node being matched is unconstrained unless a matcher is supplied.
    """

    # The context-manager expression.
    item: Union[
        BaseExpressionMatchType,
        DoNotCareSentinel,
        OneOf[BaseExpressionMatchType],
        AllOf[BaseExpressionMatchType],
    ] = DoNotCare()
    # Optional ``as name`` clause.
    asname: Union[
        Optional["AsName"],
        MetadataMatchType,
        MatchIfTrue[Optional[cst.AsName]],
        DoNotCareSentinel,
        OneOf[
            Union[
                Optional["AsName"], MetadataMatchType, MatchIfTrue[Optional[cst.AsName]]
            ]
        ],
        AllOf[
            Union[
                Optional["AsName"], MetadataMatchType, MatchIfTrue[Optional[cst.AsName]]
            ]
        ],
    ] = DoNotCare()
    # Trailing comma separating this item from the next one.
    comma: Union[
        CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType]
    ] = DoNotCare()
    # Metadata constraints applied to the matched node.
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
# Anything accepted where a Yield value matcher is expected: an expression
# matcher, a ``From`` matcher (for ``yield from``), ``None`` (bare ``yield``),
# a metadata matcher, or a predicate over the corresponding cst types.
BaseExpressionOrFromOrNoneMatchType = Union[
    "BaseExpression",
    "From",
    None,
    MetadataMatchType,
    MatchIfTrue[Union[cst.BaseExpression, cst.From, None]],
]
@dataclass(frozen=True, eq=False, unsafe_hash=False)
class Yield(BaseExpression, BaseMatcherNode):
    """Matcher for ``yield`` / ``yield from`` expressions.

    Every field defaults to ``DoNotCare()``, so the corresponding attribute of
    the node being matched is unconstrained unless a matcher is supplied.
    """

    # The yielded value: an expression, a ``From`` clause, or ``None``.
    value: Union[
        BaseExpressionOrFromOrNoneMatchType,
        DoNotCareSentinel,
        OneOf[BaseExpressionOrFromOrNoneMatchType],
        AllOf[BaseExpressionOrFromOrNoneMatchType],
    ] = DoNotCare()
    # Optional surrounding parentheses of the whole expression.
    lpar: Union[
        Sequence[
            Union[
                LeftParenMatchType,
                DoNotCareSentinel,
                OneOf[LeftParenMatchType],
                AllOf[LeftParenMatchType],
                AtLeastN[
                    Union[
                        LeftParenMatchType,
                        DoNotCareSentinel,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        LeftParenMatchType,
                        DoNotCareSentinel,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.LeftParen]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        LeftParenMatchType,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                        AtLeastN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.LeftParen]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        LeftParenMatchType,
                        OneOf[LeftParenMatchType],
                        AllOf[LeftParenMatchType],
                        AtLeastN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                LeftParenMatchType,
                                OneOf[LeftParenMatchType],
                                AllOf[LeftParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.LeftParen]],
            ]
        ],
    ] = DoNotCare()
    rpar: Union[
        Sequence[
            Union[
                RightParenMatchType,
                DoNotCareSentinel,
                OneOf[RightParenMatchType],
                AllOf[RightParenMatchType],
                AtLeastN[
                    Union[
                        RightParenMatchType,
                        DoNotCareSentinel,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                    ]
                ],
                AtMostN[
                    Union[
                        RightParenMatchType,
                        DoNotCareSentinel,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                    ]
                ],
            ]
        ],
        DoNotCareSentinel,
        MatchIfTrue[Sequence[cst.RightParen]],
        OneOf[
            Union[
                Sequence[
                    Union[
                        RightParenMatchType,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                        AtLeastN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.RightParen]],
            ]
        ],
        AllOf[
            Union[
                Sequence[
                    Union[
                        RightParenMatchType,
                        OneOf[RightParenMatchType],
                        AllOf[RightParenMatchType],
                        AtLeastN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                        AtMostN[
                            Union[
                                RightParenMatchType,
                                OneOf[RightParenMatchType],
                                AllOf[RightParenMatchType],
                            ]
                        ],
                    ]
                ],
                MatchIfTrue[Sequence[cst.RightParen]],
            ]
        ],
    ] = DoNotCare()
    # Whitespace between ``yield`` and its value.
    whitespace_after_yield: Union[
        BaseParenthesizableWhitespaceMatchType,
        DoNotCareSentinel,
        OneOf[BaseParenthesizableWhitespaceMatchType],
        AllOf[BaseParenthesizableWhitespaceMatchType],
    ] = DoNotCare()
    # Metadata constraints applied to the matched node.
    metadata: Union[
        MetadataMatchType,
        DoNotCareSentinel,
        OneOf[MetadataMatchType],
        AllOf[MetadataMatchType],
    ] = DoNotCare()
# Public API of this module. Kept alphabetically sorted; do not remove entries,
# as downstream ``from ... import *`` users rely on them.
__all__ = [
    "Add",
    "AddAssign",
    "AllOf",
    "And",
    "AnnAssign",
    "Annotation",
    "Arg",
    "AsName",
    "Assert",
    "Assign",
    "AssignEqual",
    "AssignTarget",
    "Asynchronous",
    "AtLeastN",
    "AtMostN",
    "Attribute",
    "AugAssign",
    "Await",
    "BaseAssignTargetExpression",
    "BaseAugOp",
    "BaseBinaryOp",
    "BaseBooleanOp",
    "BaseComp",
    "BaseCompOp",
    "BaseCompoundStatement",
    "BaseDelTargetExpression",
    "BaseDict",
    "BaseDictElement",
    "BaseElement",
    "BaseExpression",
    "BaseFormattedStringContent",
    "BaseList",
    "BaseMatcherNode",
    "BaseMetadataProvider",
    "BaseNumber",
    "BaseParenthesizableWhitespace",
    "BaseSet",
    "BaseSimpleComp",
    "BaseSlice",
    "BaseSmallStatement",
    "BaseStatement",
    "BaseString",
    "BaseSuite",
    "BaseUnaryOp",
    "BinaryOperation",
    "BitAnd",
    "BitAndAssign",
    "BitInvert",
    "BitOr",
    "BitOrAssign",
    "BitXor",
    "BitXorAssign",
    "BooleanOperation",
    "Break",
    "Call",
    "ClassDef",
    "Colon",
    "Comma",
    "Comment",
    "CompFor",
    "CompIf",
    "Comparison",
    "ComparisonTarget",
    "ConcatenatedString",
    "Continue",
    "Decorator",
    "Del",
    "Dict",
    "DictComp",
    "DictElement",
    "Divide",
    "DivideAssign",
    "DoNotCare",
    "DoNotCareSentinel",
    "DoesNotMatch",
    "Dot",
    "Element",
    "Ellipsis",
    "Else",
    "EmptyLine",
    "Equal",
    "ExceptHandler",
    "ExceptStarHandler",
    "Expr",
    "Finally",
    "Float",
    "FloorDivide",
    "FloorDivideAssign",
    "For",
    "FormattedString",
    "FormattedStringExpression",
    "FormattedStringText",
    "From",
    "FunctionDef",
    "GeneratorExp",
    "Global",
    "GreaterThan",
    "GreaterThanEqual",
    "If",
    "IfExp",
    "Imaginary",
    "Import",
    "ImportAlias",
    "ImportFrom",
    "ImportStar",
    "In",
    "IndentedBlock",
    "Index",
    "Integer",
    "Is",
    "IsNot",
    "Lambda",
    "LeftCurlyBrace",
    "LeftParen",
    "LeftShift",
    "LeftShiftAssign",
    "LeftSquareBracket",
    "LessThan",
    "LessThanEqual",
    "List",
    "ListComp",
    "Match",
    "MatchAs",
    "MatchCase",
    "MatchClass",
    "MatchDecoratorMismatch",
    "MatchIfTrue",
    "MatchKeywordElement",
    "MatchList",
    "MatchMapping",
    "MatchMappingElement",
    "MatchMetadata",
    "MatchMetadataIfTrue",
    "MatchOr",
    "MatchOrElement",
    "MatchPattern",
    "MatchRegex",
    "MatchSequence",
    "MatchSequenceElement",
    "MatchSingleton",
    "MatchStar",
    "MatchTuple",
    "MatchValue",
    "MatcherDecoratableTransformer",
    "MatcherDecoratableVisitor",
    "MatrixMultiply",
    "MatrixMultiplyAssign",
    "Minus",
    "Module",
    "Modulo",
    "ModuloAssign",
    "Multiply",
    "MultiplyAssign",
    "Name",
    "NameItem",
    "NamedExpr",
    "Newline",
    "Nonlocal",
    "Not",
    "NotEqual",
    "NotIn",
    "OneOf",
    "Or",
    "Param",
    "ParamSlash",
    "ParamStar",
    "Parameters",
    "ParenthesizedWhitespace",
    "Pass",
    "Plus",
    "Power",
    "PowerAssign",
    "Raise",
    "Return",
    "RightCurlyBrace",
    "RightParen",
    "RightShift",
    "RightShiftAssign",
    "RightSquareBracket",
    "SaveMatchedNode",
    "Semicolon",
    "Set",
    "SetComp",
    "SimpleStatementLine",
    "SimpleStatementSuite",
    "SimpleString",
    "SimpleWhitespace",
    "Slice",
    "StarredDictElement",
    "StarredElement",
    "Subscript",
    "SubscriptElement",
    "Subtract",
    "SubtractAssign",
    "TrailingWhitespace",
    "Try",
    "TryStar",
    "Tuple",
    "TypeOf",
    "UnaryOperation",
    "While",
    "With",
    "WithItem",
    "Yield",
    "ZeroOrMore",
    "ZeroOrOne",
    "call_if_inside",
    "call_if_not_inside",
    "extract",
    "extractall",
    "findall",
    "leave",
    "matches",
    "replace",
    "visit",
]
| 33.589779
| 88
| 0.42768
| 20,985
| 533,708
| 10.851227
| 0.020491
| 0.094777
| 0.031065
| 0.064037
| 0.927281
| 0.925687
| 0.924365
| 0.897867
| 0.883401
| 0.869331
| 0
| 0
| 0.510168
| 533,708
| 15,888
| 89
| 33.591893
| 0.871039
| 0.000435
| 0
| 0.852124
| 0
| 0
| 0.008349
| 0.000847
| 0
| 0
| 0
| 0
| 0.000195
| 1
| 0
| false
| 0.001753
| 0.010066
| 0
| 0.06923
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
9610de5a3df992c7aa88b1bcf7db402a9189d9d6
| 186
|
py
|
Python
|
src/models/sequence/rnns/__init__.py
|
dumpmemory/state-spaces
|
2a85503cb3e9e86cc05753950d4a249df9a0fffb
|
[
"Apache-2.0"
] | 513
|
2021-11-03T23:08:23.000Z
|
2022-03-31T16:29:18.000Z
|
src/models/sequence/rnns/__init__.py
|
dumpmemory/state-spaces
|
2a85503cb3e9e86cc05753950d4a249df9a0fffb
|
[
"Apache-2.0"
] | 18
|
2021-11-05T12:42:59.000Z
|
2022-03-27T19:49:55.000Z
|
src/models/sequence/rnns/__init__.py
|
MikeOwino/state-spaces
|
b6672bca994b6a36347f414faa59761e42b1e2b1
|
[
"Apache-2.0"
] | 47
|
2021-11-04T01:32:54.000Z
|
2022-03-30T18:24:26.000Z
|
# Expose the cell registry and load all possible cells
from .cells.basic import CellBase
from .cells import basic
from .cells import hippo
from .cells import timestamp
from . import sru
| 26.571429
| 54
| 0.801075
| 29
| 186
| 5.137931
| 0.551724
| 0.241611
| 0.302013
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16129
| 186
| 6
| 55
| 31
| 0.955128
| 0.27957
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
82599f44878bd76e32706da8bb50c7570cb6b534
| 9,969
|
py
|
Python
|
ArraySimulation/tests/test_PVSource.py
|
lhr-solar/Array-Sim-Training
|
e7372ee4ed094f1527e5cdbc1817108d93acace8
|
[
"MIT"
] | null | null | null |
ArraySimulation/tests/test_PVSource.py
|
lhr-solar/Array-Sim-Training
|
e7372ee4ed094f1527e5cdbc1817108d93acace8
|
[
"MIT"
] | 12
|
2020-10-13T04:59:19.000Z
|
2021-02-28T04:13:22.000Z
|
ArraySimulation/tests/test_PVSource.py
|
lhr-solar/Array-Sim-Training
|
e7372ee4ed094f1527e5cdbc1817108d93acace8
|
[
"MIT"
] | null | null | null |
"""
test_PVSource.py
Author: Matthew Yu, Array Lead (2020).
Contact: matthewjkyu@gmail.com
Created: 11/16/20
Last Modified: 11/24/20
Description: Test file to see if the various implemented models run as expected.
"""
# Library Imports.
import pytest
import sys
sys.path.append("../")
# Custom Imports.
from ArraySimulation.PVSource.PVSource import PVSource
from ArraySimulation.PVSource.PVCell.PVCellIdeal import PVCellIdeal
from ArraySimulation.PVSource.PVCell.PVCellNonideal import PVCellNonideal
class TestPVSource:
    """Unit tests for PVSource against each supported PVCell model.

    Fix applied: the original tests wrapped every assertion in
    ``try: ... except Exception as e: pytest.fail(str(e))``. That catches
    ``AssertionError`` (and pytest's own ``Failed``) and re-raises it as an
    opaque ``pytest.fail`` message, discarding the traceback and pytest's
    assertion introspection. The wrappers are removed: a raised exception or
    failed assert already fails the test with full diagnostics. The four
    identical module/source definition dicts are deduplicated into helpers
    that return fresh dicts per call (matching the original's per-call
    literals, in case the code under test mutates its input).
    """

    @staticmethod
    def _module_def():
        """Return a fresh single-module definition shared by all tests."""
        return {
            "numCells": 1,
            "voltage": 0.0,
            "irradiance": 1000,
            "temperature": 25,
        }

    @staticmethod
    def _source_def():
        """Return a fresh single-entry source definition keyed by module id."""
        return {"0": TestPVSource._module_def()}

    def test_PVSourceDefault(self):
        """A default PVSource (no cell model) raises for every lookup method."""
        source = PVSource()
        source.setupModel()
        expected = "No cell model is defined for the PVSource."
        # Every lookup method must reject use before a cell model is defined.
        with pytest.raises(Exception) as excinfo:
            source.getModuleCurrent(self._module_def())
        assert expected == str(excinfo.value)
        with pytest.raises(Exception) as excinfo:
            source.getSourceCurrent(self._source_def())
        assert expected == str(excinfo.value)
        with pytest.raises(Exception) as excinfo:
            source.getIV(self._source_def(), 0.01)
        assert expected == str(excinfo.value)
        with pytest.raises(Exception) as excinfo:
            source.getEdgeCharacteristics(self._source_def(), 0.01)
        assert expected == str(excinfo.value)
        # Assert that we get the correct model type.
        assert source.getModelType() == "Default"

    def test_PVSourceIdeal(self):
        """PVSource with the Ideal cell model matches a bare PVCellIdeal."""
        source = PVSource()
        source.setupModel("Ideal")
        cell = PVCellIdeal()
        # A single-module source must report the same current as one cell.
        assert source.getModuleCurrent(self._module_def()) == cell.getCurrent(
            numCells=1, voltage=0, irradiance=1000, temperature=25
        )
        # TODO: fix when implemented, and implement comments
        assert source.getSourceCurrent(self._source_def()) == cell.getCurrent(
            numCells=1, voltage=0, irradiance=1000, temperature=25
        )
        assert source.getIV(self._source_def(), 0.01) == cell.getCellIV(
            numCells=1, resolution=0.01, irradiance=1000, temperature=25
        )
        assert source.getEdgeCharacteristics(
            self._source_def(), 0.01
        ) == cell.getCellEdgeCharacteristics(
            numCells=1, resolution=0.01, irradiance=1000, temperature=25
        )
        # Assert that we get the correct model type.
        assert source.getModelType() == "Ideal"

    def test_PVSourceNonideal(self):
        """PVSource with the Nonideal cell model matches a bare PVCellNonideal."""
        source = PVSource()
        source.setupModel("Nonideal")
        cell = PVCellNonideal()
        # A single-module source must report the same current as one cell.
        assert source.getModuleCurrent(self._module_def()) == cell.getCurrent(
            numCells=1, voltage=0, irradiance=1000, temperature=25
        )
        # TODO: fix when implemented
        assert source.getSourceCurrent(self._source_def()) == cell.getCurrent(
            numCells=1, voltage=0, irradiance=1000, temperature=25
        )
        assert source.getIV(self._source_def(), 0.01) == cell.getCellIV(
            numCells=1, resolution=0.01, irradiance=1000, temperature=25
        )
        assert source.getEdgeCharacteristics(
            self._source_def(), 0.01
        ) == cell.getCellEdgeCharacteristics(
            numCells=1, resolution=0.01, irradiance=1000, temperature=25
        )
        # Assert that we get the correct model type.
        assert source.getModelType() == "Nonideal"

    def test_PVSourceNonidealLookup(self):
        """PVSource with the Nonideal model and lookup enabled matches
        PVCellNonideal's lookup-based current calculation.

        (The original docstring said "Ideal"; this test exercises Nonideal.)
        """
        source = PVSource()
        source.setupModel("Nonideal", True)
        cell = PVCellNonideal()
        # Lookup mode routes current queries through getCurrentLookup.
        assert source.getModuleCurrent(self._module_def()) == cell.getCurrentLookup(
            numCells=1, voltage=0, irradiance=1000, temperature=25
        )
        # TODO: fix when implemented
        assert source.getSourceCurrent(self._source_def()) == cell.getCurrentLookup(
            numCells=1, voltage=0, irradiance=1000, temperature=25
        )
        assert source.getIV(self._source_def(), 0.01) == cell.getCellIV(
            numCells=1, resolution=0.01, irradiance=1000, temperature=25
        )
        assert source.getEdgeCharacteristics(
            self._source_def(), 0.01
        ) == cell.getCellEdgeCharacteristics(
            numCells=1, resolution=0.01, irradiance=1000, temperature=25
        )
        # Assert that we get the correct model type.
        assert source.getModelType() == "Nonideal"
| 32.472313
| 85
| 0.394824
| 714
| 9,969
| 5.505602
| 0.169468
| 0.064106
| 0.178072
| 0.192317
| 0.828797
| 0.808191
| 0.783007
| 0.774358
| 0.774358
| 0.751972
| 0
| 0.06332
| 0.518407
| 9,969
| 306
| 86
| 32.578431
| 0.755468
| 0.104624
| 0
| 0.628099
| 0
| 0
| 0.091724
| 0
| 0
| 0
| 0
| 0.009804
| 0.082645
| 1
| 0.016529
| false
| 0
| 0.020661
| 0
| 0.041322
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
82610e86eee7a9e312366046b4a3d0fb704bd205
| 18,442
|
py
|
Python
|
test/test_px_request_validator.py
|
wizzard/perimeterx-python-3-wsgi
|
03aded2c868dda3ad198c1f3ee944c775557d818
|
[
"MIT"
] | 1
|
2021-11-10T16:48:31.000Z
|
2021-11-10T16:48:31.000Z
|
test/test_px_request_validator.py
|
wizzard/perimeterx-python-3-wsgi
|
03aded2c868dda3ad198c1f3ee944c775557d818
|
[
"MIT"
] | 1
|
2020-08-31T10:50:25.000Z
|
2020-08-31T10:50:25.000Z
|
test/test_px_request_validator.py
|
wizzard/perimeterx-python-3-wsgi
|
03aded2c868dda3ad198c1f3ee944c775557d818
|
[
"MIT"
] | 4
|
2020-04-01T10:37:09.000Z
|
2020-12-03T12:34:13.000Z
|
import unittest
import mock
from werkzeug.test import EnvironBuilder
from werkzeug.wrappers import Request, Response
import perimeterx.px_constants as px_constants
from perimeterx.px_config import PxConfig
from perimeterx.px_context import PxContext
from perimeterx.px_request_verifier import PxRequestVerifier
class TestPxRequestVerifier(unittest.TestCase):
    """Tests for PxRequestVerifier's verify_request / handle_verification flow.

    Fixes applied to the original:
    - ``test_enforced_specific_routes_overrides_monitor_specific_routes`` was
      defined twice; the second (regex) definition silently shadowed the
      first, so the non-regex variant never ran. The second definition is
      renamed with the ``_regex`` suffix used by every other regex twin here.
    - Stray C-style semicolons after the PxConfig literals removed.
    - The repeated EnvironBuilder/Request boilerplate is extracted into
      ``_make_request``.
    """

    @classmethod
    def setUpClass(cls):
        # Shared blocking-mode config, default client headers, and a handler
        # reused by tests that don't need a custom module mode.
        cls.config = PxConfig({'app_id': 'PXfake_app_id',
                               'auth_token': '',
                               'module_mode': px_constants.MODULE_MODE_BLOCKING
                               })
        cls.headers = {'X-FORWARDED-FOR': '127.0.0.1',
                       'remote-addr': '127.0.0.1',
                       'content_length': '100'}
        cls.request_handler = PxRequestVerifier(cls.config)

    def _make_request(self, path, headers=None):
        """Build a werkzeug Request for *path*; defaults to the shared headers."""
        builder = EnvironBuilder(headers=headers or self.headers, path=path)
        return Request(builder.get_environ())

    def test_verify_request_fp_client_passed(self):
        # First-party client requests are reverse-proxied; the proxied
        # response body is returned untouched.
        request = self._make_request('/fake_app_id/init.js')
        context = PxContext(request, self.config)
        proxied = Response("client data")
        with mock.patch('perimeterx.px_proxy.PxProxy.handle_reverse_request', return_value=proxied):
            response = self.request_handler.verify_request(context, request)
        self.assertEqual(response.data.decode("utf-8"), "client data")

    def test_verify_static_url(self):
        # Static assets (css) pass through without verification.
        request = self._make_request('/fake.css')
        context = PxContext(request, self.config)
        response = self.request_handler.verify_request(context, request)
        self.assertEqual(response, True)

    def test_verify_whitelist(self):
        # Whitelist routes come from the context's config (local here), not
        # from the handler's own config.
        config = PxConfig({'app_id': 'PXfake_app_id', 'whitelist_routes': ['whitelisted']})
        request = self._make_request('/whitelisted')
        context = PxContext(request, config)
        response = self.request_handler.verify_request(context, request)
        self.assertEqual(response, True)

    def test_handle_verification_pass(self):
        # A score below the blocking threshold lets the request through.
        config = PxConfig({'app_id': 'PXfake_app_id', 'whitelist_routes': ['whitelisted']})
        request = self._make_request('/whitelisted')
        context = PxContext(request, config)
        context.score = 50
        response = self.request_handler.handle_verification(context, request)
        self.assertEqual(response, True)

    def test_handle_verification_failed(self):
        # A blocking-threshold score in blocking mode yields a 403 response.
        config = PxConfig({'app_id': 'PXfake_app_id', 'whitelist_routes': ['whitelisted']})
        request = self._make_request('/whitelisted')
        context = PxContext(request, config)
        context.score = 100
        response = self.request_handler.handle_verification(context, request)
        self.assertEqual(response.status, '403 Forbidden')

    def test_handle_monitor(self):
        # Monitoring mode never blocks, even at a blocking score.
        config = PxConfig({'app_id': 'PXfake_app_id',
                           'auth_token': '',
                           'module_mode': px_constants.MODULE_MODE_MONITORING
                           })
        request_handler = PxRequestVerifier(config)
        request = self._make_request('/')
        context = PxContext(request, request_handler.config)
        context.score = 100
        response = request_handler.handle_verification(context, request)
        self.assertEqual(response, True)

    def test_bypass_monitor_header_enabled(self):
        # The bypass header ('1') forces blocking behavior in monitoring mode.
        config = PxConfig({'app_id': 'PXfake_app_id',
                           'auth_token': '',
                           'module_mode': px_constants.MODULE_MODE_MONITORING,
                           'bypass_monitor_header': 'x-px-block'
                           })
        headers = {'X-FORWARDED-FOR': '127.0.0.1',
                   'remote-addr': '127.0.0.1',
                   'x-px-block': '1',
                   'content_length': '100'}
        request_handler = PxRequestVerifier(config)
        request = self._make_request('/', headers)
        context = PxContext(request, request_handler.config)
        context.score = 100
        response = request_handler.handle_verification(context, request)
        self.assertEqual(response.status, '403 Forbidden')

    def test_bypass_monitor_header_disabled(self):
        # Bypass header present but '0': monitoring behavior is kept.
        config = PxConfig({'app_id': 'PXfake_app_id',
                           'auth_token': '',
                           'module_mode': px_constants.MODULE_MODE_MONITORING,
                           'bypass_monitor_header': 'x-px-block'
                           })
        headers = {'X-FORWARDED-FOR': '127.0.0.1',
                   'remote-addr': '127.0.0.1',
                   'x-px-block': '0',
                   'content_length': '100'}
        request_handler = PxRequestVerifier(config)
        request = self._make_request('/', headers)
        context = PxContext(request, request_handler.config)
        context.score = 100
        response = request_handler.handle_verification(context, request)
        self.assertEqual(response, True)

    def test_bypass_monitor_header_configured_but_missing(self):
        # Bypass header configured but absent from the request: no blocking.
        config = PxConfig({'app_id': 'PXfake_app_id',
                           'auth_token': '',
                           'module_mode': px_constants.MODULE_MODE_MONITORING,
                           'bypass_monitor_header': 'x-px-block'
                           })
        headers = {'X-FORWARDED-FOR': '127.0.0.1',
                   'remote-addr': '127.0.0.1',
                   'content_length': '100'}
        request_handler = PxRequestVerifier(config)
        request = self._make_request('/', headers)
        context = PxContext(request, request_handler.config)
        context.score = 100
        response = request_handler.handle_verification(context, request)
        self.assertEqual(response, True)

    def test_bypass_monitor_header_on_valid_request(self):
        # Bypass header set but the score is clean: request still passes.
        config = PxConfig({'app_id': 'PXfake_app_id',
                           'auth_token': '',
                           'module_mode': px_constants.MODULE_MODE_MONITORING,
                           'bypass_monitor_header': 'x-px-block'
                           })
        headers = {'X-FORWARDED-FOR': '127.0.0.1',
                   'remote-addr': '127.0.0.1',
                   'x-px-block': '1',
                   'content_length': '100'}
        request_handler = PxRequestVerifier(config)
        request = self._make_request('/', headers)
        context = PxContext(request, request_handler.config)
        context.score = 0
        response = request_handler.handle_verification(context, request)
        self.assertEqual(response, True)

    def test_specific_enforced_routes_with_enforced_route(self):
        # An enforced route blocks at a blocking score.
        config = PxConfig({'app_id': 'PXfake_app_id',
                           'auth_token': '',
                           'module_mode': px_constants.MODULE_MODE_BLOCKING,
                           'enforced_specific_routes': ['/profile'],
                           })
        request_handler = PxRequestVerifier(config)
        request = self._make_request('/profile')
        context = PxContext(request, request_handler.config)
        context.score = 100
        response = request_handler.handle_verification(context, request)
        self.assertEqual(response.status, '403 Forbidden')

    def test_specific_enforced_routes_with_enforced_route_regex(self):
        # Regex variant of the enforced-route block.
        config = PxConfig({'app_id': 'PXfake_app_id',
                           'auth_token': '',
                           'module_mode': px_constants.MODULE_MODE_BLOCKING,
                           'enforced_specific_routes_regex': [r'^/profile$'],
                           })
        request_handler = PxRequestVerifier(config)
        request = self._make_request('/profile')
        context = PxContext(request, request_handler.config)
        context.score = 100
        response = request_handler.handle_verification(context, request)
        self.assertEqual(response.status, '403 Forbidden')

    def test_specific_enforced_routes_with_unenforced_route(self):
        # Routes outside the enforced list pass even at a blocking score.
        config = PxConfig({'app_id': 'PXfake_app_id',
                           'auth_token': '',
                           'module_mode': px_constants.MODULE_MODE_BLOCKING,
                           'enforced_specific_routes': ['/profile'],
                           })
        request_handler = PxRequestVerifier(config)
        request = self._make_request('/')
        context = PxContext(request, request_handler.config)
        context.score = 100
        response = request_handler.verify_request(context, request)
        self.assertEqual(response, True)

    def test_specific_enforced_routes_with_unenforced_route_regex(self):
        # Regex variant of the unenforced-route pass-through.
        config = PxConfig({'app_id': 'PXfake_app_id',
                           'auth_token': '',
                           'module_mode': px_constants.MODULE_MODE_BLOCKING,
                           'enforced_specific_routes_regex': [r'^/profile$'],
                           })
        request_handler = PxRequestVerifier(config)
        request = self._make_request('/')
        context = PxContext(request, request_handler.config)
        context.score = 100
        response = request_handler.verify_request(context, request)
        self.assertEqual(response, True)

    def test_monitor_specific_routes_in_blocking_mode(self):
        # A monitored route in blocking mode flags the context as monitored.
        config = PxConfig({'app_id': 'PXfake_app_id',
                           'auth_token': '',
                           'module_mode': px_constants.MODULE_MODE_BLOCKING,
                           'monitored_specific_routes': ['/profile'],
                           })
        request_handler = PxRequestVerifier(config)
        request = self._make_request('/profile')
        context = PxContext(request, request_handler.config)
        request_handler.verify_request(context, request)
        self.assertEqual(context.monitored_route, True)

    def test_monitor_specific_routes_in_blocking_mode_regex(self):
        # Regex variant of the monitored-route flag.
        config = PxConfig({'app_id': 'PXfake_app_id',
                           'auth_token': '',
                           'module_mode': px_constants.MODULE_MODE_BLOCKING,
                           'monitored_specific_routes_regex': [r'^/profile$'],
                           })
        request_handler = PxRequestVerifier(config)
        request = self._make_request('/profile')
        context = PxContext(request, request_handler.config)
        request_handler.verify_request(context, request)
        self.assertEqual(context.monitored_route, True)

    def test_monitor_specific_routes_in_blocking_mode_should_block_other_routes(self):
        # Routes not on the monitored list still block in blocking mode.
        config = PxConfig({'app_id': 'PXfake_app_id',
                           'auth_token': '',
                           'module_mode': px_constants.MODULE_MODE_BLOCKING,
                           'monitored_specific_routes': ['/profile'],
                           })
        request_handler = PxRequestVerifier(config)
        request = self._make_request('/')
        context = PxContext(request, request_handler.config)
        context.score = 100
        response = request_handler.handle_verification(context, request)
        self.assertEqual(response.status, '403 Forbidden')

    def test_monitor_specific_routes_in_blocking_mode_should_block_other_routes_regex(self):
        # Regex anchors ('^/profile$') exclude '/profile/me', so it blocks.
        config = PxConfig({'app_id': 'PXfake_app_id',
                           'auth_token': '',
                           'module_mode': px_constants.MODULE_MODE_BLOCKING,
                           'monitored_specific_routes_regex': [r'^/profile$'],
                           })
        request_handler = PxRequestVerifier(config)
        request = self._make_request('/profile/me')
        context = PxContext(request, request_handler.config)
        context.score = 100
        response = request_handler.handle_verification(context, request)
        self.assertEqual(response.status, '403 Forbidden')

    def test_enforced_specific_routes_overrides_monitor_specific_routes(self):
        # A route on BOTH lists is enforced (enforced wins over monitored).
        config = PxConfig({'app_id': 'PXfake_app_id',
                           'auth_token': '',
                           'module_mode': px_constants.MODULE_MODE_BLOCKING,
                           'monitored_specific_routes': ['/profile'],
                           'enforced_specific_routes': ['/profile', '/login'],
                           })
        request_handler = PxRequestVerifier(config)
        request = self._make_request('/profile')
        context = PxContext(request, request_handler.config)
        context.score = 100
        response = request_handler.handle_verification(context, request)
        self.assertEqual(response.status, '403 Forbidden')

    def test_enforced_specific_routes_overrides_monitor_specific_routes_regex(self):
        # Regex variant. NOTE: originally this was a duplicate definition of
        # the method above, which shadowed it so the non-regex test never ran.
        config = PxConfig({'app_id': 'PXfake_app_id',
                           'auth_token': '',
                           'module_mode': px_constants.MODULE_MODE_BLOCKING,
                           'monitored_specific_routes_regex': [r'^/profile$'],
                           'enforced_specific_routes_regex': [r'^/profile$', r'^/login$'],
                           })
        request_handler = PxRequestVerifier(config)
        request = self._make_request('/profile')
        context = PxContext(request, request_handler.config)
        context.score = 100
        response = request_handler.handle_verification(context, request)
        self.assertEqual(response.status, '403 Forbidden')

    def test_monitor_specific_routes_with_enforced_specific_routes(self):
        # A monitored route NOT on the enforced list stays monitored.
        config = PxConfig({'app_id': 'PXfake_app_id',
                           'auth_token': '',
                           'module_mode': px_constants.MODULE_MODE_BLOCKING,
                           'monitored_specific_routes': ['/profile'],
                           'enforced_specific_routes': ['/login'],
                           })
        request_handler = PxRequestVerifier(config)
        request = self._make_request('/profile')
        context = PxContext(request, request_handler.config)
        request_handler.verify_request(context, request)
        self.assertEqual(context.monitored_route, True)

    def test_monitor_specific_routes_with_enforced_specific_routes_regex(self):
        # Regex variant of the monitored-but-not-enforced case.
        config = PxConfig({'app_id': 'PXfake_app_id',
                           'auth_token': '',
                           'module_mode': px_constants.MODULE_MODE_BLOCKING,
                           'monitored_specific_routes_regex': [r'^/profile$'],
                           'enforced_specific_routes_regex': [r'^/login$'],
                           })
        request_handler = PxRequestVerifier(config)
        request = self._make_request('/profile')
        context = PxContext(request, request_handler.config)
        request_handler.verify_request(context, request)
        self.assertEqual(context.monitored_route, True)
| 48.78836
| 101
| 0.581607
| 1,796
| 18,442
| 5.713252
| 0.064031
| 0.07777
| 0.016568
| 0.019881
| 0.934217
| 0.932073
| 0.931001
| 0.924666
| 0.911217
| 0.904688
| 0
| 0.025391
| 0.305932
| 18,442
| 378
| 102
| 48.78836
| 0.77625
| 0
| 0
| 0.853868
| 0
| 0
| 0.161308
| 0.031123
| 0
| 0
| 0
| 0
| 0.063037
| 1
| 0.065903
| false
| 0.028653
| 0.022923
| 0
| 0.091691
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
826c976487f37d3070c3f4e3a4bfe723409784fc
| 441
|
py
|
Python
|
src/twisted/internet/iocpreactor/iocpsupport.py
|
giadram/twisted
|
4771b1340b822d20d0664bb7d8334e8fb7e52863
|
[
"MIT",
"Unlicense"
] | 4,612
|
2015-01-01T12:57:23.000Z
|
2022-03-30T01:08:23.000Z
|
src/twisted/internet/iocpreactor/iocpsupport.py
|
giadram/twisted
|
4771b1340b822d20d0664bb7d8334e8fb7e52863
|
[
"MIT",
"Unlicense"
] | 1,243
|
2015-01-23T17:23:59.000Z
|
2022-03-28T13:46:17.000Z
|
src/twisted/internet/iocpreactor/iocpsupport.py
|
giadram/twisted
|
4771b1340b822d20d0664bb7d8334e8fb7e52863
|
[
"MIT",
"Unlicense"
] | 1,236
|
2015-01-13T14:41:26.000Z
|
2022-03-17T07:12:36.000Z
|
# Public names of this shim module: everything here is re-exported from the
# external ``twisted_iocpsupport`` package below.
__all__ = [
    "CompletionPort",
    "Event",
    "accept",
    "connect",
    "get_accept_addrs",
    "have_connectex",
    "makesockaddr",
    "maxAddrLen",
    "recv",
    "recvfrom",
    "send",
]
# Re-export the actual implementations; the ignore silences the missing-stub
# warning for the extension package.
from twisted_iocpsupport.iocpsupport import (  # type: ignore[import]
    CompletionPort,
    Event,
    accept,
    connect,
    get_accept_addrs,
    have_connectex,
    makesockaddr,
    maxAddrLen,
    recv,
    recvfrom,
    send,
)
| 15.75
| 69
| 0.596372
| 37
| 441
| 6.810811
| 0.540541
| 0.150794
| 0.198413
| 0.253968
| 0.769841
| 0.769841
| 0.769841
| 0.769841
| 0.769841
| 0.769841
| 0
| 0
| 0.281179
| 441
| 27
| 70
| 16.333333
| 0.794953
| 0.045351
| 0
| 0
| 0
| 0
| 0.238663
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.038462
| 0
| 0.038462
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
82ba5acc424035520073287fd9409a5d89d4097d
| 17,583
|
py
|
Python
|
RecoBTag/CTagging/python/training_settings.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 852
|
2015-01-11T21:03:51.000Z
|
2022-03-25T21:14:00.000Z
|
RecoBTag/CTagging/python/training_settings.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 30,371
|
2015-01-02T00:14:40.000Z
|
2022-03-31T23:26:05.000Z
|
RecoBTag/CTagging/python/training_settings.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 3,240
|
2015-01-02T05:53:18.000Z
|
2022-03-31T17:24:21.000Z
|
import FWCore.ParameterSet.Config as cms
## IMPORTANT!
## This file was automatically generated by RecoBTag/CTagging/test/dump_training_vars_cfg.py
## with input xml files:
## - C vs L: ../data/c_vs_udsg.weight.xml sha1 checksum: 1b50773894bf3c64e41694bd48bda5f6f0e3795b
## - C vs B: ../data/c_vs_b.weight.xml sha1 checksum: c342f54c6448d488e6e2b483a3a3956e34ad8ea1


def _training_var_pset(tagging_var_name, default, idx=None):
    """Build the cms.PSet describing one MVA training input variable.

    Parameters:
        tagging_var_name: stored as ``taggingVarName`` and used as the base
            of ``name``.
        default: value stored as ``default`` (substituted when the variable
            is unavailable for a jet).
        idx: optional track/vertex index.  When given, it is stored as
            ``idx`` and ``name`` becomes ``<tagging_var_name>_<idx>``;
            when ``None``, no ``idx`` parameter is emitted and ``name``
            equals ``tagging_var_name`` — exactly matching the PSets the
            generator originally dumped.

    Returns:
        A ``cms.PSet`` with parameters ``default``, ``name``,
        ``taggingVarName`` and (optionally) ``idx``.
    """
    params = {
        "default": cms.double(default),
        "name": cms.string(
            tagging_var_name if idx is None else "%s_%d" % (tagging_var_name, idx)
        ),
        "taggingVarName": cms.string(tagging_var_name),
    }
    if idx is not None:
        params["idx"] = cms.int32(idx)
    return cms.PSet(**params)


# (taggingVarName, default, idx) for every training input, in the exact order
# dumped from the weight files.  ``None`` idx marks a per-jet (unindexed)
# variable.
_TRAINING_VARS = [
    ("vertexLeptonCategory", -1, None),
    ("trackSip2dSig", -100, 0),
    ("trackSip2dSig", -100, 1),
    ("trackSip3dSig", -100, 0),
    ("trackSip3dSig", -100, 1),
    ("trackPtRel", -1, 0),
    ("trackPtRel", -1, 1),
    ("trackPPar", -1, 0),
    ("trackPPar", -1, 1),
    ("trackEtaRel", -1, 0),
    ("trackEtaRel", -1, 1),
    ("trackDeltaR", -0.1, 0),
    ("trackDeltaR", -0.1, 1),
    ("trackPtRatio", -0.1, 0),
    ("trackPtRatio", -0.1, 1),
    ("trackPParRatio", 1.1, 0),
    ("trackPParRatio", 1.1, 1),
    ("trackJetDist", -0.1, 0),
    ("trackJetDist", -0.1, 1),
    ("trackDecayLenVal", -0.1, 0),
    ("trackDecayLenVal", -0.1, 1),
    ("jetNSecondaryVertices", 0, None),
    ("jetNTracks", -0.1, None),
    ("trackSumJetEtRatio", -0.1, None),
    ("trackSumJetDeltaR", -0.1, None),
    ("vertexMass", -0.1, 0),
    ("vertexEnergyRatio", -10, 0),
    ("trackSip2dSigAboveCharm", -999, 0),
    ("trackSip3dSigAboveCharm", -999, 0),
    ("flightDistance2dSig", -1, 0),
    ("flightDistance3dSig", -1, 0),
    ("vertexJetDeltaR", -0.1, 0),
    ("vertexNTracks", 0, 0),
    ("massVertexEnergyFraction", -0.1, 0),
    ("vertexBoostOverSqrtJetPt", -0.1, 0),
    ("leptonPtRel", -1, 0),
    ("leptonPtRel", -1, 1),
    ("leptonSip3d", -10000, 0),
    ("leptonSip3d", -10000, 1),
    ("leptonDeltaR", -1, 0),
    ("leptonDeltaR", -1, 1),
    ("leptonRatioRel", -1, 0),
    ("leptonRatioRel", -1, 1),
    ("leptonEtaRel", -1, 0),
    ("leptonEtaRel", -1, 1),
    ("leptonRatio", -1, 0),
    ("leptonRatio", -1, 1),
]

# The C-vs-light and C-vs-b trainings use the identical variable list, so both
# VPSets are built from the same specification table.  A fresh generator is
# used for each so the two VPSets hold independent PSet instances.
c_vs_l_vars_vpset = cms.VPSet(*(_training_var_pset(*spec) for spec in _TRAINING_VARS))
c_vs_b_vars_vpset = cms.VPSet(*(_training_var_pset(*spec) for spec in _TRAINING_VARS))
| 31.120354
| 100
| 0.56094
| 1,833
| 17,583
| 5.326787
| 0.044735
| 0.17329
| 0.134781
| 0.163662
| 0.969889
| 0.969889
| 0.969889
| 0.969889
| 0.969889
| 0.969889
| 0
| 0.04795
| 0.288347
| 17,583
| 564
| 101
| 31.175532
| 0.732358
| 0.017915
| 0
| 0.994595
| 1
| 0
| 0.162031
| 0.032452
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.001802
| 0
| 0.001802
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
82ced1d500d431963253f675d0710bc87dbfeb1b
| 7,916
|
py
|
Python
|
jupyterlab_git/tests/test_pushpull.py
|
System1Bio/jupyterlab-git
|
bbae461e41c66e8ef1ed031b5bdca252b2e14f41
|
[
"BSD-3-Clause"
] | null | null | null |
jupyterlab_git/tests/test_pushpull.py
|
System1Bio/jupyterlab-git
|
bbae461e41c66e8ef1ed031b5bdca252b2e14f41
|
[
"BSD-3-Clause"
] | null | null | null |
jupyterlab_git/tests/test_pushpull.py
|
System1Bio/jupyterlab-git
|
bbae461e41c66e8ef1ed031b5bdca252b2e14f41
|
[
"BSD-3-Clause"
] | null | null | null |
import os
from unittest.mock import Mock, call, patch
import pytest
import tornado
from jupyterlab_git.git import Git
from .testutils import FakeContentManager
@pytest.mark.asyncio
async def test_git_pull_fail():
    """A failing ``git pull`` surfaces the subprocess error message."""
    with patch("os.environ", {"TEST": "test"}), patch(
        "jupyterlab_git.git.execute"
    ) as execute_mock:
        # Arrange: git exits non-zero with an authentication error.
        execute_mock.return_value = tornado.gen.maybe_future(
            (1, "output", "Authentication failed")
        )

        # Act
        response = await Git(FakeContentManager("/bin")).pull("test_curr_path")

        # Assert: no credentials forwarded, terminal prompt disabled.
        execute_mock.assert_called_once_with(
            ["git", "pull", "--no-commit"],
            cwd=os.path.join("/bin", "test_curr_path"),
            env={"TEST": "test", "GIT_TERMINAL_PROMPT": "0"},
        )
        assert response == {"code": 1, "message": "Authentication failed"}
@pytest.mark.asyncio
async def test_git_pull_with_auth_fail():
    """A failing authenticated pull reports the full remote error text."""
    error_text = (
        "remote: Invalid username or password.\r\n"
        "fatal: Authentication failed for 'repo_url'"
    )
    with patch("os.environ", {"TEST": "test"}), patch(
        "jupyterlab_git.git.execute"
    ) as execute_mock:
        # Arrange: git exits 1 with the remote's authentication failure.
        execute_mock.return_value = tornado.gen.maybe_future((1, "", error_text))

        # Act: pull with explicit credentials.
        credentials = {"username": "asdf", "password": "qwerty"}
        response = await Git(FakeContentManager("/bin")).pull(
            "test_curr_path", credentials
        )

        # Assert: credentials forwarded and the terminal prompt enabled.
        execute_mock.assert_called_once_with(
            ["git", "pull", "--no-commit"],
            username="asdf",
            password="qwerty",
            cwd=os.path.join("/bin", "test_curr_path"),
            env={"TEST": "test", "GIT_TERMINAL_PROMPT": "1"},
        )
        assert response == {"code": 1, "message": error_text}
@pytest.mark.asyncio
async def test_git_pull_success():
    """A zero exit status from ``git pull`` yields a bare success payload."""
    with patch("os.environ", {"TEST": "test"}), patch(
        "jupyterlab_git.git.execute"
    ) as execute_mock:
        # Arrange: git succeeds.
        execute_mock.return_value = tornado.gen.maybe_future((0, "output", ""))

        # Act
        response = await Git(FakeContentManager("/bin")).pull("test_curr_path")

        # Assert: plain pull without credentials, prompt disabled.
        execute_mock.assert_called_once_with(
            ["git", "pull", "--no-commit"],
            cwd=os.path.join("/bin", "test_curr_path"),
            env={"TEST": "test", "GIT_TERMINAL_PROMPT": "0"},
        )
        assert response == {"code": 0}
@pytest.mark.asyncio
async def test_git_pull_with_auth_success():
    """An authenticated pull that succeeds returns only the success code."""
    with patch("os.environ", {"TEST": "test"}), patch(
        "jupyterlab_git.git.execute"
    ) as execute_mock:
        # Arrange: git succeeds (stderr content is irrelevant on success).
        execute_mock.return_value = tornado.gen.maybe_future((0, "", "output"))

        # Act: pull with explicit credentials.
        credentials = {"username": "asdf", "password": "qwerty"}
        response = await Git(FakeContentManager("/bin")).pull(
            "test_curr_path", credentials
        )

        # Assert: credentials forwarded and the terminal prompt enabled.
        execute_mock.assert_called_once_with(
            ["git", "pull", "--no-commit"],
            username="asdf",
            password="qwerty",
            cwd=os.path.join("/bin", "test_curr_path"),
            env={"TEST": "test", "GIT_TERMINAL_PROMPT": "1"},
        )
        assert response == {"code": 0}
@pytest.mark.asyncio
async def test_git_push_fail():
    """A failing ``git push`` surfaces the subprocess error message."""
    with patch("os.environ", {"TEST": "test"}), patch(
        "jupyterlab_git.git.execute"
    ) as execute_mock:
        # Arrange: git exits non-zero with an authentication error.
        execute_mock.return_value = tornado.gen.maybe_future(
            (1, "output", "Authentication failed")
        )

        # Act
        response = await Git(FakeContentManager("/bin")).push(
            "test_origin", "HEAD:test_master", "test_curr_path"
        )

        # Assert: remote/refspec passed through, prompt disabled.
        execute_mock.assert_called_once_with(
            ["git", "push", "test_origin", "HEAD:test_master"],
            cwd=os.path.join("/bin", "test_curr_path"),
            env={"TEST": "test", "GIT_TERMINAL_PROMPT": "0"},
        )
        assert response == {"code": 1, "message": "Authentication failed"}
@pytest.mark.asyncio
async def test_git_push_with_auth_fail():
    """A failing authenticated push reports the full remote error text."""
    error_text = (
        "remote: Invalid username or password.\r\n"
        "fatal: Authentication failed for 'repo_url'"
    )
    with patch("os.environ", {"TEST": "test"}), patch(
        "jupyterlab_git.git.execute"
    ) as execute_mock:
        # Arrange: git exits 1 with the remote's authentication failure.
        execute_mock.return_value = tornado.gen.maybe_future((1, "", error_text))

        # Act: push with explicit credentials.
        credentials = {"username": "asdf", "password": "qwerty"}
        response = await Git(FakeContentManager("/bin")).push(
            "test_origin", "HEAD:test_master", "test_curr_path", credentials
        )

        # Assert: credentials forwarded and the terminal prompt enabled.
        execute_mock.assert_called_once_with(
            ["git", "push", "test_origin", "HEAD:test_master"],
            username="asdf",
            password="qwerty",
            cwd=os.path.join("/bin", "test_curr_path"),
            env={"TEST": "test", "GIT_TERMINAL_PROMPT": "1"},
        )
        assert response == {"code": 1, "message": error_text}
@pytest.mark.asyncio
async def test_git_push_success():
    """A zero exit status from ``git push`` yields a bare success payload."""
    with patch("os.environ", {"TEST": "test"}), patch(
        "jupyterlab_git.git.execute"
    ) as execute_mock:
        # Arrange: git succeeds (stderr content is ignored on success).
        execute_mock.return_value = tornado.gen.maybe_future(
            (0, "output", "does not matter")
        )

        # Act
        response = await Git(FakeContentManager("/bin")).push(
            ".", "HEAD:test_master", "test_curr_path"
        )

        # Assert: remote/refspec passed through, prompt disabled.
        execute_mock.assert_called_once_with(
            ["git", "push", ".", "HEAD:test_master"],
            cwd=os.path.join("/bin", "test_curr_path"),
            env={"TEST": "test", "GIT_TERMINAL_PROMPT": "0"},
        )
        assert response == {"code": 0}
@pytest.mark.asyncio
async def test_git_push_with_auth_success():
    """An authenticated push that succeeds returns only the success code."""
    with patch("os.environ", {"TEST": "test"}), patch(
        "jupyterlab_git.git.execute"
    ) as execute_mock:
        # Arrange: git succeeds (stderr content is ignored on success).
        execute_mock.return_value = tornado.gen.maybe_future(
            (0, "", "does not matter")
        )

        # Act: push with explicit credentials.
        credentials = {"username": "asdf", "password": "qwerty"}
        response = await Git(FakeContentManager("/bin")).push(
            ".", "HEAD:test_master", "test_curr_path", credentials
        )

        # Assert: credentials forwarded and the terminal prompt enabled.
        execute_mock.assert_called_once_with(
            ["git", "push", ".", "HEAD:test_master"],
            username="asdf",
            password="qwerty",
            cwd=os.path.join("/bin", "test_curr_path"),
            env={"TEST": "test", "GIT_TERMINAL_PROMPT": "1"},
        )
        assert response == {"code": 0}
| 35.182222
| 114
| 0.535498
| 798
| 7,916
| 5.050125
| 0.095238
| 0.065509
| 0.047643
| 0.086352
| 0.960298
| 0.960298
| 0.960298
| 0.960298
| 0.950372
| 0.948883
| 0
| 0.004535
| 0.331481
| 7,916
| 224
| 115
| 35.339286
| 0.756992
| 0.016043
| 0
| 0.689873
| 0
| 0
| 0.234961
| 0.026794
| 0
| 0
| 0
| 0
| 0.101266
| 1
| 0
| false
| 0.075949
| 0.037975
| 0
| 0.037975
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
7dbd0a15c9965ac8249a87df65a49584767e6078
| 4,584
|
py
|
Python
|
main_program.py
|
gerardomartinezhi/robotsim
|
0720795bf7cd009e412ec773ae0c8562f046475b
|
[
"MIT"
] | 1
|
2021-11-10T02:03:06.000Z
|
2021-11-10T02:03:06.000Z
|
main_program.py
|
gerardomartinezhi/robotsim
|
0720795bf7cd009e412ec773ae0c8562f046475b
|
[
"MIT"
] | 18
|
2020-10-06T18:49:24.000Z
|
2020-10-25T01:35:29.000Z
|
main_program.py
|
gerardomartinezhi/robotsim
|
0720795bf7cd009e412ec773ae0c8562f046475b
|
[
"MIT"
] | 2
|
2021-10-31T15:38:31.000Z
|
2021-11-10T01:59:54.000Z
|
'''
Control:
robot.move_forward()
robot.rotate_right()
robot.rotate_left()
robot.display_color(string)
robot.finish_round()
Sensors:
robot.ultrasonic_front() -> int
robot.ultrasonic_right() -> int
robot.ultrasonic_left() -> int
robot.get_color() -> string
'''
def main():
    """Drive the robot through its pre-programmed course, then finish.

    The course is encoded as a command string, one character per primitive
    action, replayed strictly in order:

        F -> robot.move_forward()
        R -> robot.rotate_right()
        L -> robot.rotate_left()
        C -> robot.display_color(robot.get_color())

    After the last step the round is closed with robot.finish_round().
    """
    # NOTE(review): `robot` is expected to be injected by the simulator
    # environment; it is not defined in this file.
    route = (
        "FCRFCRCFCL"
        "FFLFRFRFLF"
        "LFLFFFRFLF"
        "FLLFFLFFRF"
        "RFRRFLFFLF"
        "RFLLFLFFLF"
        "LFLLFRFRFL"
        "FLFRFRRFRF"
        "RFLFLLFRFF"
        "FRFRFFFRRF"
        "LFLFRFFFFL"
        "FFFFLFLFLL"
        "FLFFFFFFFL"
        "FLFFFFFFFL"
        "LFFFFFFFRF"
        "RFFFFFRFFF"
        "RFFRFRF"
    )
    for command in route:
        if command == "F":
            robot.move_forward()
        elif command == "R":
            robot.rotate_right()
        elif command == "L":
            robot.rotate_left()
        else:  # "C"
            robot.display_color(robot.get_color())
    robot.finish_round()
if __name__ == "__main__":
main()
| 24.645161
| 42
| 0.677574
| 556
| 4,584
| 5.246403
| 0.03777
| 0.308536
| 0.548509
| 0.719918
| 0.939664
| 0.939664
| 0.939664
| 0.931094
| 0.916352
| 0.916352
| 0
| 0
| 0.198298
| 4,584
| 186
| 43
| 24.645161
| 0.793742
| 0.062609
| 0
| 0.976608
| 0
| 0
| 0.001865
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.005848
| true
| 0
| 0
| 0
| 0.005848
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
815e3a5cb8455824dc444d83e29fe7ce4d56ed66
| 127
|
py
|
Python
|
example/celery/src/spaceone/work/service/__init__.py
|
jihyungSong/python-core
|
898ead301363d3e599ecd645b73071e639f886b0
|
[
"Apache-2.0"
] | 14
|
2020-06-01T08:17:43.000Z
|
2022-01-13T22:37:50.000Z
|
example/celery/src/spaceone/work/service/__init__.py
|
jihyungSong/python-core
|
898ead301363d3e599ecd645b73071e639f886b0
|
[
"Apache-2.0"
] | 7
|
2020-08-11T23:05:59.000Z
|
2022-01-12T05:08:49.000Z
|
example/celery/src/spaceone/work/service/__init__.py
|
jihyungSong/python-core
|
898ead301363d3e599ecd645b73071e639f886b0
|
[
"Apache-2.0"
] | 11
|
2020-06-01T08:17:49.000Z
|
2021-11-25T08:26:37.000Z
|
from spaceone.work.service.domain_service import DomainService
from spaceone.work.service.celery_service import ScheduleService
| 63.5
| 64
| 0.897638
| 16
| 127
| 7
| 0.5625
| 0.214286
| 0.285714
| 0.410714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.055118
| 127
| 2
| 64
| 63.5
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
81b8c5e41b2adc136aa58c2359b138a3d2a6bde6
| 10,673
|
py
|
Python
|
contrib/pycoin/tests/tx_cmdline_test.py
|
gwangjin2/gwangcoin-core
|
588e357e13c385906729d9078b796dd740745445
|
[
"MIT"
] | null | null | null |
contrib/pycoin/tests/tx_cmdline_test.py
|
gwangjin2/gwangcoin-core
|
588e357e13c385906729d9078b796dd740745445
|
[
"MIT"
] | null | null | null |
contrib/pycoin/tests/tx_cmdline_test.py
|
gwangjin2/gwangcoin-core
|
588e357e13c385906729d9078b796dd740745445
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import unittest
import os
import subprocess
import sys
import tempfile
TEST_CASES = [
(
"tx.py 010000000141045e0ab2b0b82cdefaf9e9a8ca9ec9df17673d6a74e274d0c73ae77d3f131e000000004a493046022100a7f26eda874931999c90f87f01ff1ffc76bcd058fe16137e0e63fdb6a35c2d78022100a61e9199238eb73f07c8f209504c84b80f03e30ed8169edd44f80ed17ddf451901ffffffff010010a5d4e80000001976a9147ec1003336542cae8bded8909cdd6b5e48ba0ab688ac00000000","""\
Version: 1 tx hash 49d2adb6e476fa46d8357babf78b1b501fd39e177ac7833124b3f67b17c40c2a 159 bytes
TxIn count: 1; TxOut count: 1
Lock time: 0 (valid anytime)
Input:
0: (unknown) from 1e133f7de73ac7d074e2746a3d6717dfc99ecaa8e9f9fade2cb8b0b20a5e0441:0
Output:
0: 1CZDM6oTttND6WPdt3D6bydo7DYKzd9Qik receives 10000000.00000 mBTC
Total output 10000000.00000 mBTC
including unspents in hex dump since transaction not fully signed
010000000141045e0ab2b0b82cdefaf9e9a8ca9ec9df17673d6a74e274d0c73ae77d3f131e000000004a493046022100a7f26eda874931999c90f87f01ff1ffc76bcd058fe16137e0e63fdb6a35c2d78022100a61e9199238eb73f07c8f209504c84b80f03e30ed8169edd44f80ed17ddf451901ffffffff010010a5d4e80000001976a9147ec1003336542cae8bded8909cdd6b5e48ba0ab688ac00000000
"""
),
(
"tx.py 01000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0104ffffffff0100f2052a0100000043410496b538e853519c726a2c91e61ec11600ae1390813a627c66fb8be7947be63c52da7589379515d4e0a604f8141781e62294721166bf621e73a82cbf2342c858eeac00000000",
'''Version: 1 tx hash 0e3e2357e806b6cdb1f70b54c3a3a17b6714ee1f0e68bebb44a74b1efd512098 134 bytes
TxIn count: 1; TxOut count: 1
Lock time: 0 (valid anytime)
Input:
0: COINBASE 50000.00000 mBTC
Output:
0: 12c6DSiU4Rq3P4ZxziKxzrL5LmMBrzjrJX receives 50000.00000 mBTC
Total input 50000.00000 mBTC
Total output 50000.00000 mBTC
Total fees 0.00000 mBTC
01000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0104ffffffff0100f2052a0100000043410496b538e853519c726a2c91e61ec11600ae1390813a627c66fb8be7947be63c52da7589379515d4e0a604f8141781e62294721166bf621e73a82cbf2342c858eeac00000000
all incoming transaction values validated\n'''
),
(
"tx.py -C 01000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0104ffffffff0100f2052a0100000043410496b538e853519c726a2c91e61ec11600ae1390813a627c66fb8be7947be63c52da7589379515d4e0a604f8141781e62294721166bf621e73a82cbf2342c858eeac00000000",
"""Version: 1 tx hash 0e3e2357e806b6cdb1f70b54c3a3a17b6714ee1f0e68bebb44a74b1efd512098 134 bytes
TxIn count: 1; TxOut count: 1
Lock time: 0 (valid anytime)
Input:
0: COINBASE 50000.00000 mBTC
Output:
0: 12c6DSiU4Rq3P4ZxziKxzrL5LmMBrzjrJX receives 50000.00000 mBTC
Total input 50000.00000 mBTC\nTotal output 50000.00000 mBTC
Total fees 0.00000 mBTC
01000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0104ffffffff0100f2052a0100000043410496b538e853519c726a2c91e61ec11600ae1390813a627c66fb8be7947be63c52da7589379515d4e0a604f8141781e62294721166bf621e73a82cbf2342c858eeac00000000
all incoming transaction values validated\n"""
),
(
"tx.py 0e3e2357e806b6cdb1f70b54c3a3a17b6714ee1f0e68bebb44a74b1efd512098/0/410496b538e853519c726a2c91e61ec11600ae1390813a627c66fb8be7947be63c52da7589379515d4e0a604f8141781e62294721166bf621e73a82cbf2342c858eeac/5000000000 1KissFDVu2wAYWPRm4UGh5ZCDU9sE9an8T -o tx.bin",
'all incoming transaction values validated\n'
),
(
"tx.py tx.bin",
"""\
Version: 1 tx hash 3d36aed60ecb311a55a6329f5c2af785f06e147fc35b7678eb798eca7f603c83 85 bytes
TxIn count: 1; TxOut count: 1
Lock time: 0 (valid anytime)
Input:
0: 12c6DSiU4Rq3P4ZxziKxzrL5LmMBrzjrJX from 0e3e2357e806b6cdb1f70b54c3a3a17b6714ee1f0e68bebb44a74b1efd512098:0 50000.00000 mBTC BAD SIG
Output:
0: 1KissFDVu2wAYWPRm4UGh5ZCDU9sE9an8T receives 49999.90000 mBTC
Total input 50000.00000 mBTC
Total output 49999.90000 mBTC
Total fees 0.10000 mBTC
including unspents in hex dump since transaction not fully signed
0100000001982051fd1e4ba744bbbe680e1fee14677ba1a3c3540bf7b1cdb606e857233e0e0000000000ffffffff01f0ca052a010000001976a914cd5dc792f0abb0aa8ba4ca36c9fe5eda8e495ff988ac0000000000f2052a0100000043410496b538e853519c726a2c91e61ec11600ae1390813a627c66fb8be7947be63c52da7589379515d4e0a604f8141781e62294721166bf621e73a82cbf2342c858eeac
all incoming transaction values validated\n"""
),
(
"tx.py -C 010000000135b0092a869bca1bc43e1628b3cb9e56ff9099a271fe95755e6f9289cf885b98000000008c4930460221008342b7eee70400acfed8d68be5aa8a6aeb06d7a2b3aef1fab7e4c1e46391efc6022100c0f86ba04f9a43c0d7fc8ab4cf9f3292989d3067f9b04e013c4a96bee87d5e1c014104dbedbe0028b3cbf362cad654c3b3e0902f65004691fa1332e94a31202ff06f4f7e67bd57d278c6a40b1915feb3bfb6850ca3750456e4c9af9db3d57a22b65323ffffffff02102f3504000000001976a9149b92770a85b1252448ec69900e77f1371d6a620188ac4e61bc00000000001976a91491b24bf9f5288532960ac687abb035127b1d28a588ac00000000",
"""Version: 1 tx hash d61aa2a5f5bce59d2a57447134f7ce9ce9d29b5c471f4bf747c43bf82aa26c2a 259 bytes \nTxIn count: 1; TxOut count: 2\nLock time: 0 (valid anytime)\nInput:\n 0: 1NPcbLkfWU1vFYHBG4i3XB4uQaj4P7PHr2 from 985b88cf89926f5e7595fe71a29990ff569ecbb328163ec41bca9b862a09b035:0\nOutputs:\n 0: 1FBbCJSHrcAwuyEvgjZPpHP8jGAbiCPitz receives 705.94320 mBTC\n 1: 1EHNa6Q4Jz2uvNExL497mE43ikXhwF6kZm receives 123.45678 mBTC\nTotal output 829.39998 mBTC\nincluding unspents in hex dump since transaction not fully signed\n010000000135b0092a869bca1bc43e1628b3cb9e56ff9099a271fe95755e6f9289cf885b98000000008c4930460221008342b7eee70400acfed8d68be5aa8a6aeb06d7a2b3aef1fab7e4c1e46391efc6022100c0f86ba04f9a43c0d7fc8ab4cf9f3292989d3067f9b04e013c4a96bee87d5e1c014104dbedbe0028b3cbf362cad654c3b3e0902f65004691fa1332e94a31202ff06f4f7e67bd57d278c6a40b1915feb3bfb6850ca3750456e4c9af9db3d57a22b65323ffffffff02102f3504000000001976a9149b92770a85b1252448ec69900e77f1371d6a620188ac4e61bc00000000001976a91491b24bf9f5288532960ac687abb035127b1d28a588ac00000000\n"""
),
(
"tx.py d61aa2a5f5bce59d2a57447134f7ce9ce9d29b5c471f4bf747c43bf82aa26c2a",
"""\
Version: 1 tx hash d61aa2a5f5bce59d2a57447134f7ce9ce9d29b5c471f4bf747c43bf82aa26c2a 259 bytes
TxIn count: 1; TxOut count: 2
Lock time: 0 (valid anytime)
Input:
0: 1NPcbLkfWU1vFYHBG4i3XB4uQaj4P7PHr2 from 985b88cf89926f5e7595fe71a29990ff569ecbb328163ec41bca9b862a09b035:0
Outputs:
0: 1FBbCJSHrcAwuyEvgjZPpHP8jGAbiCPitz receives 705.94320 mBTC
1: 1EHNa6Q4Jz2uvNExL497mE43ikXhwF6kZm receives 123.45678 mBTC
Total output 829.39998 mBTC
including unspents in hex dump since transaction not fully signed
010000000135b0092a869bca1bc43e1628b3cb9e56ff9099a271fe95755e6f9289cf885b98000000008c4930460221008342b7eee70400acfed8d68be5aa8a6aeb06d7a2b3aef1fab7e4c1e46391efc6022100c0f86ba04f9a43c0d7fc8ab4cf9f3292989d3067f9b04e013c4a96bee87d5e1c014104dbedbe0028b3cbf362cad654c3b3e0902f65004691fa1332e94a31202ff06f4f7e67bd57d278c6a40b1915feb3bfb6850ca3750456e4c9af9db3d57a22b65323ffffffff02102f3504000000001976a9149b92770a85b1252448ec69900e77f1371d6a620188ac4e61bc00000000001976a91491b24bf9f5288532960ac687abb035127b1d28a588ac00000000
"""
),
(
"tx.py d61aa2a5f5bce59d2a57447134f7ce9ce9d29b5c471f4bf747c43bf82aa26c2a/1/76a91491b24bf9f5288532960ac687abb035127b1d28a588ac/12345678 1KissFDVu2wAYWPRm4UGh5ZCDU9sE9an8T -o tx.bin",
"all incoming transaction values validated\n"
),
(
"tx.py tx.bin",
"""\
Version: 1 tx hash ab963a39df0e095bbd76840de90fe208e903d5d43e891ef245b217dbcd29a8a7 85 bytes
TxIn count: 1; TxOut count: 1
Lock time: 0 (valid anytime)
Input:
0: 1EHNa6Q4Jz2uvNExL497mE43ikXhwF6kZm from d61aa2a5f5bce59d2a57447134f7ce9ce9d29b5c471f4bf747c43bf82aa26c2a:1 123.45678 mBTC BAD SIG
Output:
0: 1KissFDVu2wAYWPRm4UGh5ZCDU9sE9an8T receives 123.35678 mBTC
Total input 123.45678 mBTC
Total output 123.35678 mBTC
Total fees 0.10000 mBTC
including unspents in hex dump since transaction not fully signed
01000000012a6ca22af83bc447f74b1f475c9bd2e99ccef7347144572a9de5bcf5a5a21ad60100000000ffffffff013e3abc00000000001976a914cd5dc792f0abb0aa8ba4ca36c9fe5eda8e495ff988ac000000004e61bc00000000001976a91491b24bf9f5288532960ac687abb035127b1d28a588ac
all incoming transaction values validated
"""
),
(
"tx.py tx.bin KwDiBf89QgGbjEhKnhXJuH7LrciVrZi3qYjgd9M7rFU73sVHnoWn -o signed_tx.hex",
'all incoming transaction values validated\n'
),
(
"tx.py -a signed_tx.hex",
"""\
Version: 1 tx hash 0995cf6f55e1cf22f7c31f5ad52d111e897b0b9b7e37a1bb755a470324b4a2c4 224 bytes
TxIn count: 1; TxOut count: 1
Lock time: 0 (valid anytime)
Input:
0: 1EHNa6Q4Jz2uvNExL497mE43ikXhwF6kZm from d61aa2a5f5bce59d2a57447134f7ce9ce9d29b5c471f4bf747c43bf82aa26c2a:1 123.45678 mBTC sig ok
Output:
0: 1KissFDVu2wAYWPRm4UGh5ZCDU9sE9an8T receives 123.35678 mBTC
Total input 123.45678 mBTC
Total output 123.35678 mBTC
Total fees 0.10000 mBTC
01000000012a6ca22af83bc447f74b1f475c9bd2e99ccef7347144572a9de5bcf5a5a21ad6010000008b48304502210084fd73b302520381dea1885efda58bc446653998864db7a2cd04906fc6d5536302206325303c8e50f84d25c95eff2849441382d4aafb2f678f636a6d164b721bf0f101410479be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8ffffffff013e3abc00000000001976a914cd5dc792f0abb0aa8ba4ca36c9fe5eda8e495ff988ac00000000
all incoming transaction values validated
"""
)
]
class CmdTxTest(unittest.TestCase):
def get_tempdir(self):
return tempfile.mkdtemp()
def launch_tool(self, tool_args, env={}):
# set
python_path = sys.executable
script_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "pycoin", "scripts"))
args = tool_args.split()
script_path = os.path.join(script_dir, args[0])
output = subprocess.check_output([python_path, script_path] + args[1:], env=env)
return output.decode("utf8")
def test_cases(self):
cache_dir = tempfile.mkdtemp()
os.chdir(cache_dir)
env = dict(PYCOIN_CACHE_DIR=cache_dir)
for cmd, expected_output in TEST_CASES:
actual_output = self.launch_tool(cmd, env=env)
if actual_output != expected_output:
print(repr(cmd))
print(repr(actual_output))
print(repr(expected_output))
self.assertEqual(expected_output, actual_output)
def main():
unittest.main()
if __name__ == "__main__":
main()
| 62.415205
| 1,062
| 0.84353
| 679
| 10,673
| 13.197349
| 0.217968
| 0.01607
| 0.015623
| 0.012499
| 0.431202
| 0.396608
| 0.369825
| 0.340364
| 0.330767
| 0.320612
| 0
| 0.449116
| 0.115244
| 10,673
| 170
| 1,063
| 62.782353
| 0.499841
| 0.002249
| 0
| 0.177215
| 0
| 0
| 0.6651
| 0.531901
| 0
| 1
| 0
| 0
| 0.012658
| 1
| 0.050633
| false
| 0
| 0.063291
| 0.012658
| 0.151899
| 0.037975
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
81e5e74f8ba8c93b05eab893fc922d4d8cf28232
| 8,762
|
py
|
Python
|
url.py
|
wilkystyle/open-url
|
f6c8718b1e0f85f5c0de6676c749a310137951b0
|
[
"MIT"
] | 78
|
2015-01-01T02:18:33.000Z
|
2022-03-22T20:39:20.000Z
|
url.py
|
wilkystyle/open-url
|
f6c8718b1e0f85f5c0de6676c749a310137951b0
|
[
"MIT"
] | 41
|
2015-04-19T16:18:09.000Z
|
2021-11-09T01:46:08.000Z
|
url.py
|
wilkystyle/open-url
|
f6c8718b1e0f85f5c0de6676c749a310137951b0
|
[
"MIT"
] | 18
|
2015-06-08T02:09:58.000Z
|
2021-09-24T09:05:13.000Z
|
import re
# list of known TLDs, e.g. "co"
domains = 'aaa|aarp|abb|abbott|abbvie|abogado|abudhabi|ac|academy|accenture|accountant|accountants|aco|active|actor|ad|adac|ads|adult|ae|aeg|aero|af|afl|ag|agakhan|agency|ai|aig|airforce|airtel|akdn|al|alibaba|alipay|allfinanz|ally|alsace|am|amica|amsterdam|analytics|android|anquan|ao|apartments|app|apple|aq|aquarelle|ar|aramco|archi|army|arpa|arte|as|asia|associates|at|attorney|au|auction|audi|audio|author|auto|autos|avianca|aw|aws|ax|axa|az|azure|ba|baby|baidu|band|bank|bar|barcelona|barclaycard|barclays|barefoot|bargains|bauhaus|bayern|bb|bbc|bbva|bcg|bcn|bd|be|beats|beer|bentley|berlin|best|bet|bf|bg|bh|bharti|bi|bible|bid|bike|bing|bingo|bio|biz|bj|black|blackfriday|bloomberg|blue|bm|bms|bmw|bn|bnl|bnpparibas|bo|boats|boehringer|bom|bond|boo|book|boots|bosch|bostik|bot|boutique|br|bradesco|bridgestone|broadway|broker|brother|brussels|bs|bt|budapest|bugatti|build|builders|business|buy|buzz|bv|bw|by|bz|bzh|ca|cab|cafe|cal|call|camera|camp|cancerresearch|canon|capetown|capital|car|caravan|cards|care|career|careers|cars|cartier|casa|cash|casino|cat|catering|cba|cbn|cc|cd|ceb|center|ceo|cern|cf|cfa|cfd|cg|ch|chanel|channel|chase|chat|cheap|chloe|christmas|chrome|church|ci|cipriani|circle|cisco|citic|city|cityeats|ck|cl|claims|cleaning|click|clinic|clinique|clothing|cloud|club|clubmed|cm|cn|co|coach|codes|coffee|college|cologne|com|commbank|community|company|compare|computer|comsec|condos|construction|consulting|contact|contractors|cooking|cool|coop|corsica|country|coupon|coupons|courses|cr|credit|creditcard|creditunion|cricket|crown|crs|cruises|csc|cu|cuisinella|cv|cw|cx|cy|cymru|cyou|cz|dabur|dad|dance|date|dating|datsun|day|dclk|dds|de|dealer|deals|degree|delivery|dell|deloitte|delta|democrat|dental|dentist|desi|design|dev|diamonds|diet|digital|direct|directory|discount|dj|dk|dm|dnp|do|docs|dog|doha|domains|download|drive|dubai|durban|dvag|dz|earth|eat|ec|edeka|edu|education|ee|eg|email|emerck|energy|engineer|engineering|enterprises|epson|equipment|er|erni|es|esq|
estate|et|eu|eurovision|eus|events|everbank|exchange|expert|exposed|express|extraspace|fage|fail|fairwinds|faith|family|fan|fans|farm|fashion|fast|feedback|ferrero|fi|film|final|finance|financial|firestone|firmdale|fish|fishing|fit|fitness|fj|fk|flickr|flights|flir|florist|flowers|flsmidth|fly|fm|fo|foo|football|ford|forex|forsale|forum|foundation|fox|fr|fresenius|frl|frogans|frontier|ftr|fund|furniture|futbol|fyi|ga|gal|gallery|gallo|gallup|game|garden|gb|gbiz|gd|gdn|ge|gea|gent|genting|gf|gg|ggee|gh|gi|gift|gifts|gives|giving|gl|glass|gle|global|globo|gm|gmail|gmbh|gmo|gmx|gn|gold|goldpoint|golf|goo|goog|google|gop|got|gov|gp|gq|gr|grainger|graphics|gratis|green|gripe|group|gs|gt|gu|guardian|gucci|guge|guide|guitars|guru|gw|gy|hamburg|hangout|haus|hdfcbank|health|healthcare|help|helsinki|here|hermes|hiphop|hitachi|hiv|hk|hkt|hm|hn|hockey|holdings|holiday|homedepot|homes|honda|horse|host|hosting|hoteles|hotmail|house|how|hr|hsbc|ht|htc|hu|hyundai|ibm|icbc|ice|icu|id|ie|ifm|iinet|il|im|imamat|immo|immobilien|in|industries|infiniti|info|ing|ink|institute|insurance|insure|int|international|investments|io|ipiranga|iq|ir|irish|is|iselect|ismaili|ist|istanbul|it|itau|iwc|jaguar|java|jcb|jcp|je|jetzt|jewelry|jlc|jll|jm|jmp|jnj|jo|jobs|joburg|jot|joy|jp|jpmorgan|jprs|juegos|kaufen|kddi|ke|kerryhotels|kerrylogistics|kerryproperties|kfh|kg|kh|ki|kia|kim|kinder|kitchen|kiwi|km|kn|koeln|komatsu|kp|kpmg|kpn|kr|krd|kred|kuokgroup|kw|ky|kyoto|kz|la|lacaixa|lamborghini|lamer|lancaster|land|landrover|lanxess|lasalle|lat|latrobe|law|lawyer|lb|lc|lds|lease|leclerc|legal|lexus|lgbt|li|liaison|lidl|life|lifeinsurance|lifestyle|lighting|like|limited|limo|lincoln|linde|link|lipsy|live|living|lixil|lk|loan|loans|locus|lol|london|lotte|lotto|love|lr|ls|lt|ltd|ltda|lu|lupin|luxe|luxury|lv|ly|ma|madrid|maif|maison|makeup|man|management|mango|market|marketing|markets|marriott|mba|mc|md|me|med|media|meet|melbourne|meme|memorial|men|menu|meo|metlife|mg|mh|miami|microsoft|mil|mini|mk|ml|mls|mm|m
ma|mn|mo|mobi|mobily|moda|moe|moi|mom|monash|money|montblanc|mormon|mortgage|moscow|motorcycles|mov|movie|movistar|mp|mq|mr|ms|mt|mtn|mtpc|mtr|mu|museum|mutual|mutuelle|mv|mw|mx|my|mz|na|nadex|nagoya|name|natura|navy|nc|ne|nec|net|netbank|network|neustar|new|news|next|nextdirect|nexus|nf|ng|ngo|nhk|ni|nico|nikon|ninja|nissan|nissay|nl|no|nokia|northwesternmutual|norton|nowruz|nowtv|np|nr|nra|nrw|ntt|nu|nyc|nz|obi|office|okinawa|olayan|olayangroup|om|omega|one|ong|onl|online|ooo|oracle|orange|org|organic|origins|osaka|otsuka|ovh|pa|page|pamperedchef|panerai|paris|pars|partners|parts|party|passagens|pccw|pe|pet|pf|pg|ph|pharmacy|philips|photo|photography|photos|physio|piaget|pics|pictet|pictures|pid|pin|ping|pink|pizza|pk|pl|place|play|playstation|plumbing|plus|pm|pn|pohl|poker|porn|post|pr|praxi|press|pro|prod|productions|prof|progressive|promo|properties|property|protection|ps|pt|pub|pw|pwc|py|qa|qpon|quebec|quest|racing|re|read|realtor|realty|recipes|red|redstone|redumbrella|rehab|reise|reisen|reit|ren|rent|rentals|repair|report|republican|rest|restaurant|review|reviews|rexroth|rich|richardli|ricoh|rio|rip|ro|rocher|rocks|rodeo|room|rs|rsvp|ru|ruhr|run|rw|rwe|ryukyu|sa|saarland|safe|safety|sakura|sale|salon|samsung|sandvik|sandvikcoromant|sanofi|sap|sapo|sarl|sas|saxo|sb|sbi|sbs|sc|sca|scb|schaeffler|schmidt|scholarships|school|schule|schwarz|science|scor|scot|sd|se|seat|security|seek|select|sener|services|seven|sew|sex|sexy|sfr|sg|sh|sharp|shaw|shell|shia|shiksha|shoes|shouji|show|shriram|si|sina|singles|site|sj|sk|ski|skin|sky|skype|sl|sm|smile|sn|sncf|so|soccer|social|softbank|software|sohu|solar|solutions|song|sony|soy|space|spiegel|spot|spreadbetting|sr|srl|st|stada|star|starhub|statebank|statefarm|statoil|stc|stcgroup|stockholm|storage|store|stream|studio|study|style|su|sucks|supplies|supply|support|surf|surgery|suzuki|sv|swatch|swiss|sx|sy|sydney|symantec|systems|sz|tab|taipei|talk|taobao|tatamotors|tatar|tattoo|tax|taxi|tc|tci|td|team|tech|technology|tel|te
lecity|telefonica|temasek|tennis|teva|tf|tg|th|thd|theater|theatre|tickets|tienda|tiffany|tips|tires|tirol|tj|tk|tl|tm|tmall|tn|to|today|tokyo|tools|top|toray|toshiba|total|tours|town|toyota|toys|tr|trade|trading|training|travel|travelers|travelersinsurance|trust|trv|tt|tube|tui|tunes|tushu|tv|tvs|tw|tz|ua|ubs|ug|uk|unicom|university|uno|uol|us|uy|uz|va|vacations|vana|vc|ve|vegas|ventures|verisign|versicherung|vet|vg|vi|viajes|video|vig|viking|villas|vin|vip|virgin|vision|vista|vistaprint|viva|vlaanderen|vn|vodka|volkswagen|vote|voting|voto|voyage|vu|vuelos|wales|walter|wang|wanggou|warman|watch|watches|weather|weatherchannel|webcam|weber|website|wed|wedding|weibo|weir|wf|whoswho|wien|wiki|williamhill|win|windows|wine|wme|wolterskluwer|work|works|world|ws|wtc|wtf|xbox|xerox|xihuan|xin|xn--11b4c3d|xn--1ck2e1b|xn--1qqw23a|xn--30rr7y|xn--3bst00m|xn--3ds443g|xn--3e0b707e|xn--3pxu8k|xn--42c2d9a|xn--45brj9c|xn--45q11c|xn--4gbrim|xn--55qw42g|xn--55qx5d|xn--5tzm5g|xn--6frz82g|xn--6qq986b3xl|xn--80adxhks|xn--80ao21a|xn--80asehdb|xn--80aswg|xn--8y0a063a|xn--90a3ac|xn--90ais|xn--9dbq2a|xn--9et52u|xn--9krt00a|xn--b4w605ferd|xn--bck1b9a5dre4c|xn--c1avg|xn--c2br7g|xn--cck2b3b|xn--cg4bki|xn--clchc0ea0b2g2a9gcd|xn--czr694b|xn--czrs0t|xn--czru2d|xn--d1acj3b|xn--d1alf|xn--e1a4c|xn--eckvdtc9d|xn--efvy88h|xn--estv75g|xn--fct429k|xn--fhbei|xn--fiq228c5hs|xn--fiq64b|xn--fiqs8s|xn--fiqz9s|xn--fjq720a|xn--flw351e|xn--fpcrj9c3d|xn--fzc2c9e2c|xn--fzys8d69uvgm|xn--g2xx48c|xn--gckr3f0f|xn--gecrj9c|xn--h2brj9c|xn--hxt814e|xn--i1b6b1a6a2e|xn--imr513n|xn--io0a7i|xn--j1aef|xn--j1amh|xn--j6w193g|xn--jlq61u9w7b|xn--jvr189m|xn--kcrx77d1x4a|xn--kprw13d|xn--kpry57d|xn--kpu716f|xn--kput3i|xn--l1acc|xn--lgbbat1ad8j|xn--mgb9awbf|xn--mgba3a3ejt|xn--mgba3a4f16a|xn--mgba7c0bbn0a|xn--mgbaam7a8h|xn--mgbab2bd|xn--mgbayh7gpa|xn--mgbb9fbpob|xn--mgbbh1a71e|xn--mgbc0a9azcg|xn--mgbca7dzdo|xn--mgberp4a5d4ar|xn--mgbpl2fh|xn--mgbt3dhd|xn--mgbtx2b|xn--mgbx4cd0ab|xn--mix891f|xn--mk1bu44c|xn--mxtq1m|xn--ngbc5azd|xn--ngbe9
e0a|xn--node|xn--nqv7f|xn--nqv7fs00ema|xn--nyqy26a|xn--o3cw4h|xn--ogbpf8fl|xn--p1acf|xn--p1ai|xn--pbt977c|xn--pgbs0dh|xn--pssy2u|xn--q9jyb4c|xn--qcka1pmc|xn--qxam|xn--rhqv96g|xn--rovu88b|xn--s9brj9c|xn--ses554g|xn--t60b56a|xn--tckwe|xn--unup4y|xn--vermgensberater-ctb|xn--vermgensberatung-pwb|xn--vhquv|xn--vuq861b|xn--w4r85el8fhu5dnra|xn--w4rs40l|xn--wgbh1c|xn--wgbl6a|xn--xhq521b|xn--xkc2al3hye2a|xn--xkc2dl3a5ee0h|xn--y9a3aq|xn--yfro4i67o|xn--ygbi2ammx|xn--zfr164b|xperia|xxx|xyz|yachts|yahoo|yamaxun|yandex|ye|yodobashi|yoga|yokohama|you|youtube|yt|yun|za|zara|zero|zip|zm|zone|zuerich|zw'
def is_url(path: str) -> bool:
return bool(re.search(r"\w[^\s]*\.(?:%s)(/[^\s]*)?\Z" % domains, path, re.IGNORECASE))
| 876.2
| 8,593
| 0.811459
| 1,472
| 8,762
| 4.829484
| 0.904212
| 0.000563
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.033902
| 0.003538
| 8,762
| 9
| 8,594
| 973.555556
| 0.780323
| 0.00331
| 0
| 0
| 0
| 0.25
| 0.986027
| 0.986027
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0.25
| 0.25
| 0.25
| 0.75
| 0.25
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 10
|
c492ab0724be79682b9499f1c9d759bcd7d17c8d
| 32,758
|
py
|
Python
|
HPnex/multiclass_validation.py
|
PanditPranav/PREDICT_network_analysis
|
8e7d24b3e549c818c99fb132ac7e190d6561df6f
|
[
"MIT"
] | null | null | null |
HPnex/multiclass_validation.py
|
PanditPranav/PREDICT_network_analysis
|
8e7d24b3e549c818c99fb132ac7e190d6561df6f
|
[
"MIT"
] | null | null | null |
HPnex/multiclass_validation.py
|
PanditPranav/PREDICT_network_analysis
|
8e7d24b3e549c818c99fb132ac7e190d6561df6f
|
[
"MIT"
] | null | null | null |
"""Running basic code:
Importing packages, setting working directory,
printing out date"""
import os as os
os.chdir('C:/Users/falco/Desktop/directory/Missing_links_in_viral_host_communities/')
import datetime as dt
str(dt.datetime.now())
from sklearn.metrics import confusion_matrix
import seaborn as sns
#from pandas_ml import ConfusionMatrix
data_path = 'C:/Users/falco/Desktop/directory/Missing_links_in_viral_host_communities/data'
output_path = 'C:/Users/falco/Desktop/directory/Missing_links_in_viral_host_communities/outputs'
from HPnex import functions as f
from HPnex import classification as classify
from HPnex import fitting_functions as fitt
import numpy as np
import networkx as nx
#np.random.seed(42)
from sklearn.ensemble import RandomForestClassifier
#from pandas_ml import ConfusionMatrix
from matplotlib import pyplot as plt
import seaborn as sns
import scipy.stats as stats
from sklearn import model_selection
import math
height = 6
font = 12
import sklearn
from sklearn.feature_extraction.text import CountVectorizer, TfidfTransformer
from sklearn.ensemble import RandomForestClassifier, GradientBoostingClassifier, AdaBoostClassifier
from sklearn.naive_bayes import MultinomialNB
from sklearn.svm import SVC, LinearSVC
from sklearn.metrics import classification_report, f1_score, accuracy_score, confusion_matrix
from sklearn.pipeline import Pipeline
from sklearn.model_selection import GridSearchCV
#from sklearn.cross_validation import
from sklearn.model_selection import StratifiedKFold ,cross_val_score, train_test_split, cross_val_predict
from sklearn.tree import DecisionTreeClassifier
from sklearn.model_selection import learning_curve
#from pandas_ml import ConfusionMatrix
from textblob import TextBlob
from sklearn.linear_model import SGDClassifier
from sklearn.ensemble import ExtraTreesClassifier, RandomForestClassifier, AdaBoostClassifier, GradientBoostingClassifier
from sklearn.model_selection import GridSearchCV
from sklearn.metrics import accuracy_score
from xgboost import XGBClassifier
#### Standardize continuous variables
from sklearn.preprocessing import StandardScaler
from sklearn import preprocessing
#from pandas_ml import ConfusionMatrix
from HPnex import functions as f
### Running cross validation scores and predictions
from sklearn.model_selection import StratifiedKFold ,cross_val_score, train_test_split, cross_val_predict
from sklearn.metrics import classification_report, f1_score, accuracy_score, confusion_matrix, precision_recall_fscore_support
import matplotlib.style as style
style.use('fivethirtyeight')
plt.rcParams['font.family'] = 'Times New Roman'
sns.set_context("notebook", font_scale=1.30, rc={"lines.linewidth": 0.8})
import itertools as itertools
import pandas as pd
import joblib
###############################################################################################################################
###############################################################################################################################
def generete_temp_network(virus, hosts, ViralFamily, PubMed, BPnx_group, Gc,IUCN, virus_df):
#print('this function is in multiclass validation file 1st function')
import math
temp_BPnx = BPnx_group.copy()
#print (temp_BPnx.number_of_nodes()) ## checking number of nodes
virus_nodes = [x for x,y in temp_BPnx.nodes(data=True) if y['type']=='virus'] #creating list of virus nodes from bipartite network
df = pd.DataFrame({'Virus2':virus_nodes}) # converting them to a dataframe
df['Virus1'] = virus # dataframe with all possible combinations of new virus and viruses from BPnx
temp_BPnx.add_node(virus, virusname=virus, type='virus', bipartite = 1) ## adding new node to the Bpnxtemp
#print (temp_BPnx.number_of_nodes()) ## rechecking number of nodes
for h in hosts:
temp_BPnx.add_edge(virus, h) ## adding new edge to the Bpnxtemp
def get_n_shared_hosts(c): ## calculating number of neighbours for our new virus
return len(list(nx.common_neighbors(temp_BPnx, c['Virus1'],c['Virus2'])))
df['n_shared_hosts'] = df.apply(get_n_shared_hosts, axis=1)
def addsharedhosts (c): ## identifiying number of neighbours for our new virus
return sorted(nx.common_neighbors(temp_BPnx, c['Virus1'],c['Virus2']))
df["shared_hosts"] = df.apply(addsharedhosts, axis=1)
def add_hosts_orders (c):
order_list = IUCN[IUCN.ScientificName.isin(c['shared_hosts'])]['Order'].unique().tolist()
return order_list
df["shared_orders"] = df.apply(add_hosts_orders, axis=1)
new_edges = df[df['n_shared_hosts']>0] ### list of new edges for new viruses
#print(new_edges.shape)
Gc_temp = Gc.copy() ## creating a temporary copy of GC complete
Gc_temp.add_node(virus, ViralFamily=ViralFamily, type='virus', bipartite = 1) ## adding new node to the Bpnxtemp
for index, row in new_edges.iterrows():
if row['n_shared_hosts'] > 0:
Gc_temp.add_edge(row['Virus1'], row['Virus2'], weight = row['n_shared_hosts'], hosts = ','.join(row['shared_hosts']),
orders = ','.join(row['shared_orders']))
#edges_to_predict = df[df['n_shared_hosts']==0]
edges_to_predict = df
edges_to_predict = edges_to_predict[edges_to_predict.Virus2 != 'nan']
virus_df_temp = virus_df.copy()
virus_df_temp.loc[len(virus_df_temp)]=[virus, ViralFamily, math.log(PubMed),1, 1]
return Gc_temp, edges_to_predict, virus_df_temp
###############################################################################################################################
###############################################################################################################################
def prediction(temp_x, clf_multi, inv_dictionary):
#print('this function is in multiclass validation file 1st function')
inv_dictionary = dict((k, v.title()) for k,v in inv_dictionary.iteritems())
Order_prediction = pd.DataFrame(clf_multi.predict(temp_x)).replace(inv_dictionary)
temp_x.columns = ['f0', 'f1', 'f2', 'f3', 'f4', 'f5', 'f6', 'f7', 'f8', 'f9']
#temp_x.columns = ['f0', 'f1', 'f2', 'f3', 'f4', 'f5', 'f6', 'f7', 'f8', 'f9', 'f10', 'f11','f12', 'f13']
probs = clf_multi.predict_proba(temp_x)
prob_max =[]
for i in range (len(probs)):
prob_max.append(np.amax(probs[i], axis = 1))
max_prob = pd.DataFrame(prob_max).T
prediction = Order_prediction.join(max_prob, lsuffix='_pr', rsuffix='_shakyata')
return prediction
###############################################################################################################################
###############################################################################################################################
def cross_validation_predict(virus, hosts, ViralFamily, PubMed, BPnx_group, Gc, virus_df, clf_multi, inv_dictionary):
#print('this function is in multiclass validation file')
from HPnex import predict_multi as pred_m
Gc_temp_group, edges_to_predict, virus_df_temp = generete_temp_network(virus = virus,
hosts = hosts,
ViralFamily = ViralFamily,
PubMed = PubMed,
BPnx_group = BPnx_group,
Gc = Gc,
virus_df = virus_df)
temp_x = pred_m.preprocessing_x(data_frame = edges_to_predict,
network = Gc_temp_group,
virus_df_temp = virus_df_temp,
virus_df = virus_df)
pred_group = prediction(temp_x =temp_x, clf_multi =clf_multi, inv_dictionary = inv_dictionary)
result_group = pred_group.join(edges_to_predict)
return result_group, edges_to_predict
###############################################################################################################################
###############################################################################################################################
def generete_temp_network(virus, hosts, ViralFamily, PubMed, BPnx_group, Gc,IUCN, virus_df):
#print('this function is in multiclass validation file')
import math
temp_BPnx = BPnx_group.copy()
#print (temp_BPnx.number_of_nodes()) ## checking number of nodes
#virus_nodes = [x for x,y in temp_BPnx.nodes(data=True) if y['type']=='virus']
q_df = pd.DataFrame.from_dict(dict(BPnx_group.nodes(data=True)), orient='index')
q_df = q_df.loc[q_df.index.dropna()]
virus_nodes = q_df[q_df['type'] == 'virus'].index.tolist()#creating list of virus nodes from bipartite network
df = pd.DataFrame({'Virus2':virus_nodes}) # converting them to a dataframe
df['Virus1'] = virus # dataframe with all possible combinations of new virus and viruses from BPnx
temp_BPnx.add_node(virus, virusname=virus, type='virus', bipartite = 1) ## adding new node to the Bpnxtemp
#print (temp_BPnx.number_of_nodes()) ## rechecking number of nodes
for h in hosts:
temp_BPnx.add_edge(virus, h) ## adding new edge to the Bpnxtemp
def get_n_shared_hosts(c): ## calculating number of neighbours for our new virus
return len(list(nx.common_neighbors(temp_BPnx, c['Virus1'],c['Virus2'])))
df['n_shared_hosts'] = df.apply(get_n_shared_hosts, axis=1)
def addsharedhosts (c): ## identifiying number of neighbours for our new virus
return sorted(nx.common_neighbors(temp_BPnx, c['Virus1'],c['Virus2']))
df["shared_hosts"] = df.apply(addsharedhosts, axis=1)
def add_hosts_orders (c):
order_list = IUCN[IUCN.ScientificName.isin(c['shared_hosts'])]['Order'].unique().tolist()
return order_list
df["shared_orders"] = df.apply(add_hosts_orders, axis=1)
new_edges = df[df['n_shared_hosts']>0] ### list of new edges for new viruses
#print(new_edges.shape)
Gc_temp = Gc.copy() ## creating a temporary copy of GC complete
Gc_temp.add_node(virus, ViralFamily=ViralFamily, type='virus', bipartite = 1) ## adding new node to the Bpnxtemp
for index, row in new_edges.iterrows():
if row['n_shared_hosts'] > 0:
Gc_temp.add_edge(row['Virus1'], row['Virus2'], weight = row['n_shared_hosts'], hosts = ','.join(row['shared_hosts']),
orders = ','.join(row['shared_orders']))
#edges_to_predict = df[df['n_shared_hosts']==0]
edges_to_predict = df
edges_to_predict = edges_to_predict[edges_to_predict.Virus2 != 'nan']
virus_df_temp = virus_df.copy()
virus_df_temp.loc[len(virus_df_temp)]=[virus, ViralFamily, math.log(PubMed),1, 1]
return Gc_temp, edges_to_predict, virus_df_temp
def prediction(temp_x, clf_multi, inv_dictionary):
#print('prediction function is in multiclass validation file 2nd function')
#print(temp_x.shape)
inv_dictionary = dict((k, v.title()) for k,v in inv_dictionary.iteritems())
temp_x.columns = ['f0', 'f1', 'f2', 'f3', 'f4', 'f5', 'f6', 'f7', 'f8', 'f9']
#temp_x.columns = ['f0', 'f1', 'f2', 'f3', 'f4', 'f5', 'f6', 'f7', 'f8', 'f9', 'f10', 'f11', 'f12', 'f13']
Order_prediction = pd.DataFrame(clf_multi.predict(temp_x)).replace(inv_dictionary)
#print (Order_prediction.shape)
probs = clf_multi.predict_proba(temp_x)
prob_max =[]
for i in range (len(probs)):
prob_max.append(np.amax(probs[i], axis = 1))
max_prob = pd.DataFrame(prob_max).T
prediction = Order_prediction.join(max_prob, lsuffix='_pr', rsuffix='_shakyata')
return prediction
def cross_validation_predict(virus, hosts, ViralFamily, PubMed, BPnx_group, Gc, virus_df, clf_multi, inv_dictionary, IUCN):
#print('cross_validation_predict function is in multiclass validation file')
from HPnex import predict_multi as pred_m
Gc_temp_group, edges_to_predict, virus_df_temp = generete_temp_network(virus = virus,
hosts = hosts,
ViralFamily = ViralFamily,
PubMed = PubMed,
BPnx_group = BPnx_group,
IUCN = IUCN,
Gc = Gc,
virus_df = virus_df)
temp_x = pred_m.preprocessing_x(data_frame = edges_to_predict,
network = Gc_temp_group,
virus_df_temp = virus_df_temp,
virus_df = virus_df)
pred_group = prediction(temp_x =temp_x, clf_multi =clf_multi, inv_dictionary = inv_dictionary)
result_group = pred_group.join(edges_to_predict)
return result_group, edges_to_predict
###############################################################################################################################
###############################################################################################################################
def run_cross_validation(i, df, XGB, data_path, virus_df, IUCN):
    """Leave-one-group-out cross-validation at the taxonomic *Order* level.

    Rebuilds the bipartite and unipartite virus networks without group *i*,
    fits a MultiOutputClassifier around the supplied XGB estimator on the
    remaining data, then predicts shared host orders for each virus in the
    held-out group.

    NOTE(review): a second function with this exact name is defined later in
    this module (species-level hosts); at import time that later definition
    shadows this one -- confirm which version callers actually get.

    Returns
    -------
    (result_group, edges_group): concatenated per-virus predictions and the
    candidate edges they were scored on.
    """
    print('run_cross_validation function is in multiclass validation file')
    from HPnex import functions as f
    from HPnex import classification as classify
    from HPnex import fitting_functions as fitt
    print('running model for group '+ str(i) )
    # Training set: every record NOT in the held-out group i.
    df_temp = df[df.group != i]
    import pickle
    # NOTE(review): hard-coded absolute Windows path -- breaks on other machines.
    dictionary = pickle.load(open("C:/Users/falco/Desktop/directory/Missing_links_in_viral_host_communities/outputs/dictionary_order_humans.pkl", "rb"))
    # Python 2 only: dict.iteritems(); switch to .items() if porting to Python 3.
    inv_dictionary = {v: k for k, v in dictionary.iteritems()}
    print ("first construct bipartite network to reterive original data information about shared hosts")
    BPnx_group = f.construct_bipartite_taxa_virus_network(
        dataframe=df_temp,
        taxa_level = 'Order',
        network_name='Go',
        plot=False,
        filter_file=False,
        taxonomic_filter=None)
    print('generation of observed network after removing group '+ str(i))
    Gc_df, Gc = f.construct_unipartite_taxa_level_virus_virus_network(
        dataframe=df_temp,
        taxa_level = 'Order',
        network_name='Gc Order level',
        layout_func='fruchterman_reingold',
        plot=False,
        filter_file=False,
        taxonomic_filter=None,
        return_df=True)
    print ('getting network data for Observed network using Gc and BPnx_group')
    # NOTE(review): leading '\I' in Species_file_name is a literal backslash
    # (Windows-style separator) -- presumably joined with data_path downstream.
    Multiclass_data = fitt.get_complete_network_data_for_fitting_multiclass(Gc = Gc, BPnx = BPnx_group, data_path= data_path,
        virus_df = virus_df, Species_file_name='\IUCN Mammals, Birds, Reptiles, and Amphibians.csv')
    print('preprocessing data for fitting model')
    # The block of imports below is largely redundant (several repeats and
    # unused names) but is kept as-is; imports are idempotent.
    from xgboost import XGBClassifier
    #### Standardize continuous variables
    from sklearn.preprocessing import StandardScaler
    from sklearn import preprocessing
    from pandas_ml import ConfusionMatrix
    from HPnex import functions as f
    ### Running cross validation scores and predictions
    from sklearn.model_selection import StratifiedKFold ,cross_val_score, train_test_split, cross_val_predict
    from sklearn.metrics import classification_report, f1_score, accuracy_score, confusion_matrix, precision_recall_fscore_support
    model_data = Multiclass_data
    from sklearn.metrics import classification_report, f1_score
    from sklearn.model_selection import StratifiedKFold, StratifiedShuffleSplit
    #def run_mulitlabel_model(model_data, cv, rf, virus_df, Gc_data):
    #predictors = [
    #    'jaccard', 'betweeness_diff', 'in_same_cluster', 'degree_diff',
    #    'FamilyMatch', 'PubMed_diff', 'PubMed_Search_ln1', 'PubMed_Search_ln2', 'neighbors_n',
    #    'adamic_adar', 'resource', 'preferential_attach'
    #]
    # Continuous predictors fed to the model (categorical F1/F2 added below).
    predictors = [
        'jaccard', 'betweeness_diff', 'in_same_cluster', 'degree_diff',
        'FamilyMatch', 'PubMed_diff', 'PubMed_Search_ln1', 'PubMed_Search_ln2',
    ]
    import sklearn
    from sklearn.preprocessing import MultiLabelBinarizer
    from sklearn.multioutput import MultiOutputClassifier
    from sklearn.linear_model import SGDClassifier
    from sklearn.ensemble import ExtraTreesClassifier, RandomForestClassifier, AdaBoostClassifier, GradientBoostingClassifier
    from sklearn.model_selection import GridSearchCV
    from sklearn.metrics import accuracy_score
    from sklearn.svm import SVC, LinearSVC
    from sklearn import preprocessing
    from sklearn_pandas import DataFrameMapper
    from sklearn.metrics import classification_report, f1_score, accuracy_score, confusion_matrix
    # Empty host-order lists become the explicit 'No_Sharing' class.
    model_data['shared_hosts_label'] = model_data['orders_label'].apply(lambda y: ['No_Sharing'] if len(y)==0 else y)
    # Stringify the label lists, strip list/quote characters, then split into
    # one column per label position (wide multi-output target frame).
    Y_ml_df = model_data['shared_hosts_label'].apply(str).str.strip("['']").str.replace("'", "").str.strip().str.split(', ', expand = True)
    # Encode label names to integer codes via the pickled dictionary.
    Y_ml_df = Y_ml_df.replace(dictionary)
    X = model_data[list(predictors)].values
    #### Standardize continuous variables
    from sklearn.preprocessing import StandardScaler
    scaler = StandardScaler()
    X_std = scaler.fit_transform(X)
    data_processed = pd.DataFrame(X_std, columns=predictors)
    #data_processed.head()
    ### Encoding categorical variables
    le = preprocessing.LabelEncoder()
    le.fit(virus_df.viral_family.unique())
    # NOTE(review): 'Not_Assinged' (sic) must already exist in
    # virus_df.viral_family, otherwise transform() raises.
    model_data['F1'] = le.transform(model_data.ViralFamily1.fillna('Not_Assinged'))
    model_data['F2'] = le.transform(model_data.ViralFamily2.fillna('Not_Assinged'))
    data_processed['F1'] = model_data.F1
    data_processed['F2'] = model_data.F2
    data_processed.fillna(0, inplace=True)
    print('fitting the model for group '+ str(i))
    from HPnex import functions as f
    from sklearn.model_selection import cross_val_predict, cross_val_score
    from sklearn.multioutput import MultiOutputClassifier
    XGB = XGB
    # One clone of XGB per output column of Y_ml_df.
    multi_target_classifier = MultiOutputClassifier(XGB, n_jobs=1)
    # Missing trailing labels are filled with code 19 -- presumably the
    # 'no label' class; verify against the pickled dictionary.
    multi_target_classifier.fit(data_processed, Y_ml_df.fillna(19).values)
    print(multi_target_classifier)
    print ('predicting using fitted model for group '+ str(i))
    # Held-out group: one aggregated row per virus.
    predict_df = df[df.group == i]
    predict_df = predict_df.groupby('Virus').agg({'Order':'unique',
                                                  'viral_family':'unique',
                                                  'PubMed_Search':'unique'}) #,ScientificName , 'PubMed_Search']
    # Each virus has a single family / PubMed count; take the first element.
    predict_df['viral_family'] = predict_df['viral_family'].str.get(0)
    predict_df['PubMed_Search'] = predict_df['PubMed_Search'].str.get(0).astype(int)
    predict_df.reset_index(inplace = True)
    print ('running predictions')
    RESULT = []
    e_predict = []
    for index, row in predict_df.dropna().iterrows():
        result, edges_to_predict = cross_validation_predict(virus =row['Virus'],
            hosts = row['Order'],
            PubMed = row['PubMed_Search'],
            ViralFamily = row['viral_family'],
            BPnx_group = BPnx_group,
            Gc = Gc,
            virus_df = virus_df,
            clf_multi = multi_target_classifier,
            inv_dictionary = inv_dictionary,
            IUCN = IUCN)
        RESULT.append(result)
        e_predict.append(edges_to_predict)
    result_group = pd.concat(RESULT, axis=0)
    edges_group = pd.concat(e_predict, axis=0)
    return result_group, edges_group
#######################################################################################################################################
#######################################################################################################################################
def run_cross_validation(i, df, XGB, data_path, virus_df, IUCN):
    """Leave-one-group-out cross-validation using species-level hosts.

    Variant of the Order-level function defined earlier in this module;
    because both share the same name, THIS later definition is the one bound
    at import time. Differences: bipartite network is host(species)-virus,
    the unipartite network builder takes IUCN data, and the held-out viruses
    are re-inserted via their ScientificName host list.

    Returns
    -------
    (result_group, edges_group): concatenated per-virus predictions and the
    candidate edges they were scored on.
    """
    print('run_cross_validation function is in multiclass validation file')
    from HPnex import functions as f
    from HPnex import classification as classify
    from HPnex import fitting_functions as fitt
    print('running model for group '+ str(i) )
    # Training set: every record NOT in the held-out group i.
    df_temp = df[df.group != i]
    import pickle
    # NOTE(review): hard-coded absolute Windows path -- breaks on other machines.
    dictionary = pickle.load(open("C:/Users/falco/Desktop/directory/Missing_links_in_viral_host_communities/outputs/dictionary_order_humans.pkl", "rb"))
    # Python 2 only: dict.iteritems(); switch to .items() if porting to Python 3.
    inv_dictionary = {v: k for k, v in dictionary.iteritems()}
    print ("first construct bipartite network to reterive original data information about shared hosts")
    BPnx_group = f.construct_bipartite_host_virus_network(
        dataframe=df_temp,
        network_name='Go',
        plot=False,
        filter_file=False,
        taxonomic_filter=None)
    print('generation of observed network after removing group '+ str(i))
    Gc_df, Gc = f.construct_unipartite_virus_virus_network_order(
        dataframe=df_temp,
        network_name='all_network',
        IUCN = IUCN,
        layout_func='fruchterman_reingold',
        plot=False,
        filter_file=False,
        taxonomic_filter=None,
        return_df=True)
    print ('getting network data for Observed network using Gc and BPnx_group')
    # NOTE(review): leading '\I' in Species_file_name is a literal backslash
    # (Windows-style separator) -- presumably joined with data_path downstream.
    Multiclass_data = fitt.get_complete_network_data_for_fitting_multiclass(Gc = Gc, BPnx = BPnx_group, data_path= data_path,
        virus_df = virus_df, Species_file_name='\IUCN Mammals, Birds, Reptiles, and Amphibians.csv')
    print('preprocessing data for fitting model')
    # The import block below is largely redundant (repeats / unused names)
    # but kept as-is; imports are idempotent.
    from xgboost import XGBClassifier
    #### Standardize continuous variables
    from sklearn.preprocessing import StandardScaler
    from sklearn import preprocessing
    from pandas_ml import ConfusionMatrix
    from HPnex import functions as f
    ### Running cross validation scores and predictions
    from sklearn.model_selection import StratifiedKFold ,cross_val_score, train_test_split, cross_val_predict
    from sklearn.metrics import classification_report, f1_score, accuracy_score, confusion_matrix, precision_recall_fscore_support
    model_data = Multiclass_data
    from sklearn.metrics import classification_report, f1_score
    from sklearn.model_selection import StratifiedKFold, StratifiedShuffleSplit
    #def run_mulitlabel_model(model_data, cv, rf, virus_df, Gc_data):
    #predictors = [
    #    'jaccard', 'betweeness_diff', 'in_same_cluster', 'degree_diff',
    #    'FamilyMatch', 'PubMed_diff', 'PubMed_Search_ln1', 'PubMed_Search_ln2', 'neighbors_n',
    #    'adamic_adar', 'resource', 'preferential_attach'
    #]
    # Continuous predictors fed to the model (categorical F1/F2 added below).
    predictors = [
        'jaccard', 'betweeness_diff', 'in_same_cluster', 'degree_diff',
        'FamilyMatch', 'PubMed_diff', 'PubMed_Search_ln1', 'PubMed_Search_ln2'
    ]
    #predictors = [
    #    'jaccard', 'betweeness_diff', 'in_same_cluster', 'degree_diff',
    #    'FamilyMatch', 'PubMed_diff', 'PubMed_Search_ln1', 'PubMed_Search_ln2',
    #    'VirusCluster1', 'VirusCluster2', 'resource', 'preferential_attach'
    #]
    import sklearn
    from sklearn.preprocessing import MultiLabelBinarizer
    from sklearn.multioutput import MultiOutputClassifier
    from sklearn.linear_model import SGDClassifier
    from sklearn.ensemble import ExtraTreesClassifier, RandomForestClassifier, AdaBoostClassifier, GradientBoostingClassifier
    from sklearn.model_selection import GridSearchCV
    from sklearn.metrics import accuracy_score
    from sklearn.svm import SVC, LinearSVC
    from sklearn import preprocessing
    from sklearn_pandas import DataFrameMapper
    from sklearn.metrics import classification_report, f1_score, accuracy_score, confusion_matrix
    # Empty host-order lists become the explicit 'No_Sharing' class.
    model_data['shared_hosts_label'] = model_data['orders_label'].apply(lambda y: ['No_Sharing'] if len(y)==0 else y)
    # Stringify label lists, strip list/quote characters, split into one
    # column per label position (wide multi-output target frame).
    Y_ml_df = model_data['shared_hosts_label'].apply(str).str.strip("['']").str.replace("'", "").str.strip().str.split(', ', expand = True)
    # Encode label names to integer codes via the pickled dictionary.
    Y_ml_df = Y_ml_df.replace(dictionary)
    X = model_data[list(predictors)].values
    #### Standardize continuous variables
    from sklearn.preprocessing import StandardScaler
    scaler = StandardScaler()
    X_std = scaler.fit_transform(X)
    data_processed = pd.DataFrame(X_std, columns=predictors)
    #data_processed.head()
    ### Encoding categorical variables
    le = preprocessing.LabelEncoder()
    le.fit(virus_df.viral_family.unique())
    # NOTE(review): 'Not_Assinged' (sic) must already exist in
    # virus_df.viral_family, otherwise transform() raises.
    model_data['F1'] = le.transform(model_data.ViralFamily1.fillna('Not_Assinged'))
    model_data['F2'] = le.transform(model_data.ViralFamily2.fillna('Not_Assinged'))
    data_processed['F1'] = model_data.F1
    data_processed['F2'] = model_data.F2
    data_processed.fillna(0, inplace=True)
    print('fitting the model for group '+ str(i))
    from HPnex import functions as f
    from sklearn.model_selection import cross_val_predict, cross_val_score
    from sklearn.multioutput import MultiOutputClassifier
    XGB = XGB
    # One clone of XGB per output column of Y_ml_df.
    multi_target_classifier = MultiOutputClassifier(XGB, n_jobs=1)
    # Missing trailing labels filled with code 19 -- presumably the 'no label'
    # class; verify against the pickled dictionary.
    multi_target_classifier.fit(data_processed, Y_ml_df.fillna(19).values)
    print(multi_target_classifier)
    print ('predicting using fitted model for group '+ str(i))
    # Held-out group: one aggregated row per virus, keeping the species list.
    predict_df = df[df.group == i]
    predict_df = predict_df.groupby('Virus').agg({
        'ScientificName': 'unique',
        'Order':'unique',
        'viral_family':'unique',
        'PubMed_Search':'unique'}) #,ScientificName , 'PubMed_Search']
    print('scientific names')
    # Each virus has a single family / PubMed count; take the first element.
    predict_df['viral_family'] = predict_df['viral_family'].str.get(0)
    predict_df['PubMed_Search'] = predict_df['PubMed_Search'].str.get(0).astype(int)
    predict_df.reset_index(inplace = True)
    print ('running predictions')
    RESULT = []
    e_predict = []
    for index, row in predict_df.dropna().iterrows():
        # Species-level variant: hosts come from ScientificName, not Order.
        result, edges_to_predict = cross_validation_predict(virus =row['Virus'],
            hosts = row['ScientificName'],
            PubMed = row['PubMed_Search'],
            ViralFamily = row['viral_family'],
            BPnx_group = BPnx_group,
            Gc = Gc,
            virus_df = virus_df,
            clf_multi = multi_target_classifier,
            inv_dictionary = inv_dictionary,
            IUCN = IUCN)
        RESULT.append(result)
        e_predict.append(edges_to_predict)
    result_group = pd.concat(RESULT, axis=0)
    edges_group = pd.concat(e_predict, axis=0)
    return result_group, edges_group
#######################################################################################################################################
#######################################################################################################################################
def generate_score(cv_preds, cv_epreds, virus_df, i, plot = False):
    """Score cross-validation predictions against observed shared host orders.

    Joins the prediction frame to its candidate edges, compares the set of
    predicted orders ('*_pr' columns) with the observed 'shared_orders' per
    edge, and tallies TP/FP/FN per order and group. Optionally plots the
    per-order support / f1 / sensitivity / PPV distributions.

    Returns a DataFrame with columns
    ['Order', 'TP', 'FP', 'FN', 'Group', 'PPV', 'Sensitivity', 'support', 'f1-score'].
    """
    print('generate_score function is in multiclass validation file')
    r_group = pd.concat([cv_preds, cv_epreds], axis=1)
    # All predicted-label columns ('0_pr', '1_pr', ...).
    cols = r_group.filter(regex='_pr').columns.tolist()
    #r_group['combined_orders']=r_group[['0_pr', '10_pr',u'11_pr', '12_pr', '13_pr', '14_pr', '15_pr', '16_pr', '17_pr', '1_pr',
    #'2_pr', '3_pr', '4_pr', '5_pr', '6_pr', '7_pr', '8_pr', '9_pr']].values.tolist()
    # Collapse every per-output prediction into a single list per edge.
    r_group['combined_orders']=r_group[cols].values.tolist()
    r_group = r_group.loc[:,~r_group.columns.duplicated()]
    # Observed labels; empty list means the pair shares no hosts.
    r_group['shared_hosts'] = r_group['shared_orders'].apply(lambda y: ['No_Sharing'] if len(y)==0 else y)
    r_group['combined_orders'] = r_group['combined_orders'].apply(lambda x: set(x))
    r_group['shared_hosts'] = r_group['shared_hosts'].apply(lambda x: set(x))
    # NOTE(review): Python 2 map() returns a list; under Python 3 this would be
    # a one-shot iterator but shared_hosts is iterated three times below
    # (TP/FP/FN) -- wrap in list() when porting.
    r_group['shared_hosts'] = r_group['shared_hosts'].apply(lambda x: map(str.title, x))
    print('accuracy matrix based on first prediction' )
    # Accuracy of the FIRST predicted order vs the FIRST observed order only.
    a =r_group[['0_pr', 'shared_hosts']]
    a = pd.concat([a, a.shared_hosts.apply(pd.Series)], axis=1)
    from sklearn.metrics import confusion_matrix
    from sklearn.metrics import accuracy_score
    from sklearn.metrics import classification_report
    # NOTE(review): cm is computed but never used afterwards.
    cm = confusion_matrix(a['0_pr'], a[0])
    print ('Accuracy Score :',accuracy_score(a['0_pr'], a[0]))
    print('Classification Report : ')
    print (classification_report(a['0_pr'], a[0]))
    # Set algebra per edge: TP = predicted AND observed, FP = predicted only,
    # FN = observed only.
    r_group['TP'] = [list(set(a).intersection(set(b))) for a, b in zip(r_group.shared_hosts, r_group.combined_orders)]
    r_group['FP'] = [list(set(b).difference(set(a))) for a, b in zip(r_group.shared_hosts, r_group.combined_orders)]
    r_group['FN'] = [list(set(a).difference(set(b))) for a, b in zip(r_group.shared_hosts, r_group.combined_orders)]
    #r_group = pd.merge(r_group, virus_df, left_on='Virus1', right_on='virus_name', how='left')
    r_group['group'] = i
    #r_group.group.fillna(0, inplace= True)
    # Per-group tallies (a single group i here, but kept generic).
    m = []
    for g in r_group.group.unique():
        temp_r = r_group[r_group.group == g]
        # Explode each list column and count occurrences per order name.
        TP = temp_r['TP'].apply(pd.Series).stack().reset_index(drop=True).value_counts()
        FP = temp_r['FP'].apply(pd.Series).stack().reset_index(drop=True).value_counts()
        FN = temp_r['FN'].apply(pd.Series).stack().reset_index(drop=True).value_counts()
        matrix_group = pd.concat([TP, FP, FN], axis = 1)
        matrix_group.columns = ['TP', 'FP', 'FN']
        matrix_group['Group'] = g
        matrix_group['PPV'] = matrix_group.TP.fillna(0)/(matrix_group.TP.fillna(0)+ matrix_group.FP.fillna(0))
        matrix_group['Sensitivity'] = matrix_group.TP.fillna(0)/(matrix_group.TP.fillna(0)+ matrix_group.FN.fillna(0))
        m.append(matrix_group)
    matrix = pd.concat(m, axis=0).reset_index()
    matrix['support'] = matrix.TP+ matrix.FP +matrix.FN
    # Harmonic mean of precision (PPV) and recall (Sensitivity).
    matrix['f1-score'] = 2*((matrix['PPV']*matrix['Sensitivity'])/(matrix['PPV']+matrix['Sensitivity']))
    matrix.columns = ['Order', 'TP', 'FP', 'FN', 'Group', 'PPV', 'Sensitivity', 'support', 'f1-score']
    if plot:
        import matplotlib.style as style
        style.use('fivethirtyeight')
        plt.rcParams['font.family'] = 'Times New Roman'
        sns.set_context("notebook", font_scale=1.0, rc={"lines.linewidth": 0.8})
        validation_matrix = matrix
        # 2x2 panel: support, f1-score, sensitivity, PPV per order.
        fig, ((ax1, ax2),(ax3, ax4)) = plt.subplots(2, 2, figsize = [12,8], sharey= False)
        sns.boxplot(x="support", y="Order", data=validation_matrix.dropna(), ax = ax1)
        sns.stripplot(x="support", y="Order", data=validation_matrix.dropna(), jitter= True, color='#252525', ax= ax1)
        ax1.set_xlabel('support')
        ax1.set_title('Predicting Shared Host Order\n\n\n', horizontalalignment = 'center', loc = 'left', fontsize=16)
        text1 = 'Sample size for validation of XGBoost model performance in correctly predicting the type of links (host order) between two viruses\nthat did not share hosts in the observed network '+ r'$G_o$'+ ' and shared hosts in '+ r'$G_c$'+'.\n'
        ax1.text(-0.3, 0.99, text1, verticalalignment='bottom',
                 horizontalalignment='left',
                 transform=ax1.transAxes,
                 color='gray', fontsize=14)
        ax1.set_xscale('log')
        ax1.set_ylabel('')
        sns.boxplot(x="f1-score", y="Order", data=validation_matrix.dropna(), ax = ax2)
        sns.stripplot(x="f1-score", y="Order", data=validation_matrix.dropna(), jitter= True, color='#252525', ax= ax2)
        ax2.set_xlabel('f1-score')
        ax2.set_xlim(0,1.02)
        ax2.set_ylabel('')
        sns.boxplot(x="Sensitivity", y="Order", data=validation_matrix.dropna(), ax = ax3)
        sns.stripplot(x="Sensitivity", y="Order", data=validation_matrix.dropna(), jitter= True, color='#252525', ax= ax3)
        ax3.set_xlim(0,1.02)
        ax3.set_xlabel('Sensitivity')
        ax3.set_ylabel('')
        sns.boxplot(x="PPV", y="Order", data=validation_matrix.dropna(), ax = ax4)
        sns.stripplot(x="PPV", y="Order", data=validation_matrix.dropna(), jitter= True, color='#252525', ax= ax4)
        ax4.set_xlim(0,1.02)
        ax4.set_xlabel('Positive Predictive Value')
        ax4.set_ylabel('')
        plt.tight_layout()
        #plt.savefig('outputs/XGBoost_order_prediction_performance.png', dpi = 600)
        plt.show()
    return matrix
| 50.552469
| 250
| 0.632273
| 3,957
| 32,758
| 5
| 0.119282
| 0.035027
| 0.016983
| 0.018196
| 0.857721
| 0.837453
| 0.822037
| 0.816932
| 0.802881
| 0.800455
| 0
| 0.011612
| 0.208712
| 32,758
| 647
| 251
| 50.630603
| 0.751669
| 0.120764
| 0
| 0.741866
| 0
| 0.002169
| 0.135915
| 0.016608
| 0.002169
| 0
| 0
| 0
| 0
| 1
| 0.032538
| false
| 0
| 0.227766
| 0.008677
| 0.292842
| 0.056399
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c4fe5fa596e95192b5a685bf2d766682277a6ef1
| 96
|
py
|
Python
|
pqueue/__init__.py
|
GiliardGodoi/pqueue
|
80749d61e87c5d112c960ee77a8cce46017812e8
|
[
"MIT"
] | null | null | null |
pqueue/__init__.py
|
GiliardGodoi/pqueue
|
80749d61e87c5d112c960ee77a8cce46017812e8
|
[
"MIT"
] | null | null | null |
pqueue/__init__.py
|
GiliardGodoi/pqueue
|
80749d61e87c5d112c960ee77a8cce46017812e8
|
[
"MIT"
] | null | null | null |
# Public package interface: re-export the queue implementation.
from .main import PriorityQueue
# Short alias for convenience / backward compatibility.
from .main import PriorityQueue as PQueue
# Package version string.
__version__ = '0.0.1'
| 24
| 41
| 0.791667
| 14
| 96
| 5.142857
| 0.642857
| 0.222222
| 0.388889
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.036145
| 0.135417
| 96
| 4
| 42
| 24
| 0.831325
| 0
| 0
| 0
| 0
| 0
| 0.051546
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
48077c6b7737f38494cf37ad4e623bdd95c081ad
| 2,452
|
py
|
Python
|
tests/test_haystack_about.py
|
sgrah-oss/haystackapi
|
dc6000120e5ef97b174bb1440460ce170f22026e
|
[
"BSD-2-Clause",
"Apache-2.0"
] | null | null | null |
tests/test_haystack_about.py
|
sgrah-oss/haystackapi
|
dc6000120e5ef97b174bb1440460ce170f22026e
|
[
"BSD-2-Clause",
"Apache-2.0"
] | null | null | null |
tests/test_haystack_about.py
|
sgrah-oss/haystackapi
|
dc6000120e5ef97b174bb1440460ce170f22026e
|
[
"BSD-2-Clause",
"Apache-2.0"
] | null | null | null |
from unittest.mock import patch
import haystackapi
from haystackapi import Grid
from haystackapi.ops import HaystackHttpRequest
from haystackapi.providers import ping
@patch.dict('os.environ', {'HAYSTACK_PROVIDER': 'haystackapi.providers.ping'})
@patch('haystackapi.providers.haystack_interface.no_cache')
@patch.object(ping.Provider, 'about')
def test_about_with_zinc(mock, no_cache) -> None:
    """'about' op round-trip with explicit zinc Content-Type/Accept headers."""
    # Mock args are injected bottom-up: patch.object(..., 'about') -> mock,
    # then the no_cache patch -> no_cache.
    # GIVEN
    no_cache.return_value = True
    mock.return_value = ping.PingGrid
    mime_type = haystackapi.MODE_ZINC
    request = HaystackHttpRequest()
    request.headers["Content-Type"] = mime_type
    request.headers["Accept"] = mime_type
    # WHEN
    response = haystackapi.about(request, "dev")
    # THEN
    mock.assert_called_once_with("https://localhost/dev")
    assert response.status_code == 200
    assert response.headers["Content-Type"].startswith(mime_type)
    # Body must parse back as a grid in the negotiated MIME type.
    assert haystackapi.parse(response.body, mime_type) is not None
@patch.dict('os.environ', {'HAYSTACK_PROVIDER': 'haystackapi.providers.ping'})
@patch('haystackapi.providers.haystack_interface.no_cache')
@patch.object(ping.Provider, 'about')
def test_about_without_headers(mock, no_cache) -> None:
    """'about' op with no request headers: response defaults to CSV."""
    # Mock args are injected bottom-up: patch.object(..., 'about') -> mock,
    # then the no_cache patch -> no_cache.
    # GIVEN
    no_cache.return_value = True
    mock.return_value = Grid(columns=["a"])
    mock.return_value.append({"a": 1})
    # Expected default MIME type when the client sends no Accept header.
    mime_type = haystackapi.MODE_CSV
    request = HaystackHttpRequest()
    # WHEN
    response = haystackapi.about(request, "dev")
    # THEN
    mock.assert_called_once_with("https://localhost/dev")
    assert response.status_code == 200
    assert response.headers["Content-Type"].startswith(mime_type)
    assert haystackapi.parse(response.body, mime_type) is not None
@patch.dict('os.environ', {'HAYSTACK_PROVIDER': 'haystackapi.providers.ping'})
@patch('haystackapi.providers.haystack_interface.no_cache')
@patch.object(ping.Provider, 'about')
def test_about_with_multivalues_headers(mock, no_cache) -> None:
    """'about' op content negotiation: first value of a multi-valued Accept wins."""
    # Mock args are injected bottom-up: patch.object(..., 'about') -> mock,
    # then the no_cache patch -> no_cache.
    # GIVEN
    no_cache.return_value = True
    mock.return_value = ping.PingGrid
    mime_type = haystackapi.MODE_ZINC
    request = HaystackHttpRequest()
    request.headers["Accept"] = "text/zinc, application/json"
    # WHEN
    response = haystackapi.about(request, "dev")
    # THEN
    mock.assert_called_once_with("https://localhost/dev")
    assert response.status_code == 200
    assert response.headers["Content-Type"].startswith(mime_type)
    assert haystackapi.parse(response.body, mime_type) is not None
| 34.535211
| 78
| 0.739804
| 305
| 2,452
| 5.757377
| 0.213115
| 0.050114
| 0.034169
| 0.030752
| 0.811503
| 0.811503
| 0.811503
| 0.811503
| 0.811503
| 0.811503
| 0
| 0.004737
| 0.13907
| 2,452
| 70
| 79
| 35.028571
| 0.827096
| 0.019168
| 0
| 0.708333
| 0
| 0
| 0.201253
| 0.093946
| 0
| 0
| 0
| 0
| 0.25
| 1
| 0.0625
| false
| 0
| 0.104167
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4813284525f417c55efd6ddc191ec1783da54c93
| 15,474
|
py
|
Python
|
map_gen_2/canvas.py
|
Xorgon/Map-Generator
|
d91356ba7264d93999bd78f73337ebc3c53e117c
|
[
"MIT"
] | 9
|
2017-06-29T22:04:53.000Z
|
2022-03-31T16:43:55.000Z
|
map_gen_2/canvas.py
|
Xorgon/Map-Generator
|
d91356ba7264d93999bd78f73337ebc3c53e117c
|
[
"MIT"
] | null | null | null |
map_gen_2/canvas.py
|
Xorgon/Map-Generator
|
d91356ba7264d93999bd78f73337ebc3c53e117c
|
[
"MIT"
] | 1
|
2019-05-26T18:42:57.000Z
|
2019-05-26T18:42:57.000Z
|
from tkinter import *
from PIL import Image, ImageTk
import math
from random import Random
import map_gen_2.util.vector_util as vect
from matplotlib.path import Path
import numpy as np
import aggdraw
import os
class MapCanvasBasic:
    """Tk-based map renderer: draws generator output onto a tkinter Canvas.

    Single-sprite variant (one mountain/tree/hill image). Drawing order
    defines z-order on the canvas, so the statement order in draw_map matters.
    """
    tk_master = None
    canvas = None
    rand = None
    height = None
    width = None
    # Assets
    parchment = None
    mountain = None
    tree = None
    hill = None
    def __init__(self, height=720, width=1280):
        """Create the Tk window and canvas; RNG seeded with 0 for reproducible maps."""
        self.tk_master = Tk()
        self.height = height
        self.width = width
        self.canvas = Canvas(self.tk_master, width=width, height=height)
        self.rand = Random()
        self.rand.seed(0)
        self.load_assets()
    def load_assets(self):
        """Load the parchment background and the single mountain/tree/hill sprites."""
        self.parchment = ImageTk.PhotoImage(Image.open("assets/parchment.jpg"))
        self.mountain = ImageTk.PhotoImage(Image.open("assets/mountain.png").resize((40, 25), Image.ANTIALIAS))
        self.tree = ImageTk.PhotoImage(Image.open("assets/tree.png").resize((5, 8), Image.ANTIALIAS))
        self.hill = ImageTk.PhotoImage(Image.open("assets/hill.png").resize((20, 13), Image.ANTIALIAS))
    def draw_line(self, p1, p2, color='black', width=1):
        """Draw a straight line from p1 to p2."""
        self.canvas.create_line(p1[0], p1[1], p2[0], p2[1], fill=color, width=width)
    def draw_irregular_line(self, p1, p2, splits=3, mag_fact=0.25, color='black', width=1):
        """Hand-drawn-looking line: repeatedly insert random midpoints, then draw
        the refined polyline as smoothed segments."""
        points = [p1, p2]
        for i in range(splits):
            # Each pass doubles the segment count by splitting every segment.
            new_points = []
            for p_idx in range(len(points)):
                new_points.append(points[p_idx])
                if p_idx + 1 < len(points):
                    new_points.append(vect.split_line(points[p_idx], points[p_idx + 1], self.rand, mag_fact))
            points = new_points
        for i in range(len(points) - 1):
            self.canvas.create_line(points[i][0], points[i][1], points[i + 1][0], points[i + 1][1], fill=color,
                                    width=width, smooth=1)
    def draw_multi_line(self, points, color='black', width=1):
        """Draw a polyline through *points* as straight segments."""
        for i in range(len(points) - 1):
            self.draw_line(points[i], points[i + 1], color=color, width=width)
    def draw_irregular_multi_line(self, points, splits=3, mag_fact=0.25, color='black', width=1):
        """Draw a polyline through *points* using irregular (jittered) segments."""
        for i in range(len(points) - 1):
            self.draw_irregular_line(points[i], points[i + 1], splits=splits, mag_fact=mag_fact, color=color,
                                     width=width)
    def draw_point(self, p, color='black', radius=3):
        """Draw a filled dot of *radius* centred on p."""
        self.canvas.create_oval(p[0] - radius, p[1] - radius, p[0] + radius, p[1] + radius, fill=color)
    def fill_region(self, points, color='blue'):
        """Fill the polygon *points* with a 50%-stippled colour."""
        corrected_points = []
        for p in points:
            corrected_points.append([p[0], p[1]])
        self.canvas.create_polygon(corrected_points, fill=color, stipple="gray50")
    def fill_region_with_image(self, points, image, step=5, rand=None, offset_fact=0.5):
        """Stamp *image* on a jittered `step`-spaced grid clipped to polygon *points*."""
        # Bounding box of the polygon, clamped to the canvas extents.
        min_p = [self.width, self.height]
        max_p = [0, 0]
        for point in points:
            min_p[0] = min(min_p[0], point[0])
            min_p[1] = min(min_p[1], point[1])
            max_p[0] = max(max_p[0], point[0])
            max_p[1] = max(max_p[1], point[1])
        path = Path(points)
        fill_points = []
        for x in np.arange(min_p[0], max_p[0], step):
            for y in np.arange(min_p[1], max_p[1], step):
                if path.contains_point([x, y]):
                    fill_points.append([x, y])
        # Draw top-to-bottom so lower (nearer) sprites overlap higher ones.
        fill_points = sorted(fill_points, key=lambda p: p[1])
        for fill_point in fill_points:
            if rand is None:
                rand = Random()
            offset = [(rand.random() - 0.5) * step * offset_fact, (rand.random() - 0.5) * step * offset_fact]
            self.canvas.create_image(fill_point[0] + offset[0], fill_point[1] + offset[1], image=image, anchor=CENTER)
    def draw_all_mountains(self, gen, points_per_mountain=1):
        """Stamp the mountain sprite at every Nth mountain point, top-to-bottom."""
        dps = []
        for m in gen.sorted_mountain_dps:
            dps.extend(m)
        points = []
        for d_p in dps:
            points.append(gen.delaunay.points[d_p])
        sorted_points = sorted(points, key=lambda point: point[1])
        for i in range(len(sorted_points)):
            if i % points_per_mountain != 0:
                continue
            p = sorted_points[i]
            self.canvas.create_image(p[0], p[1], image=self.mountain, anchor=CENTER)
    def draw_mountain_range(self, points, points_per_mountain=2):
        """Stamp the mountain sprite along a single range (every Nth of *points*)."""
        sorted_points = sorted(points, key=lambda point: point[1])
        for i in range(len(sorted_points)):
            if i % points_per_mountain != 0:
                continue
            p = sorted_points[i]
            self.canvas.create_image(p[0], p[1], image=self.mountain, anchor=CENTER)
    def draw_map(self, gen, debug=False):
        """Render the full map from generator *gen*. Statement order gives
        z-order: background, water, rivers, mountains, hills, forests, debug."""
        self.canvas.create_image(self.width / 2, self.height / 2, image=self.parchment, anchor=CENTER)
        # Water
        for water_poly in gen.water_polys:
            self.fill_region(water_poly, '#dcdcdc')
        for edge in gen.water_edges:
            for i in range(len(edge) - 1):
                self.draw_line(edge[i], edge[i + 1], "#483320", 2)
        # Rivers
        for riv in gen.river_points:
            for i in range(len(riv) - 1):
                # Taper: segments get thinner toward the river's end.
                thickness = round(2 * ((len(riv) - i) / len(riv)) ** 0.5)
                self.draw_irregular_line(riv[i], riv[i + 1], color="#483320", width=int(thickness), splits=2)
        # Mountains
        self.draw_all_mountains(gen, 1)
        # Hills
        for d_p in gen.hill_dps:
            region = gen.voronoi.regions[gen.voronoi.point_region[d_p]]
            points = []
            for vert_idx in region:
                points.append(gen.voronoi.vertices[vert_idx])
            self.fill_region_with_image(points, self.hill, 12, offset_fact=0.2)
        # Forests
        for d_p in gen.forest_dps:
            region = gen.voronoi.regions[gen.voronoi.point_region[d_p]]
            points = []
            for vert_idx in region:
                points.append(gen.voronoi.vertices[vert_idx])
            self.fill_region_with_image(points, self.tree)
        if debug:
            self.draw_debug_geometry(gen)
    def show_map(self):
        """Pack the canvas and start the Tk main loop (blocks until closed)."""
        self.canvas.pack()
        self.tk_master.mainloop()
    def draw_debug_geometry(self, gen):
        """Overlay the raw Voronoi/Delaunay geometry for debugging."""
        for ridge in gen.voronoi.ridge_vertices:
            v1_idx = ridge[0]
            v2_idx = ridge[1]
            # -1 marks a ridge extending to infinity; skip those.
            if v1_idx != -1 and v2_idx != -1:
                self.draw_line(gen.voronoi.vertices[v1_idx], gen.voronoi.vertices[v2_idx])
        for vert in gen.voronoi.vertices:
            self.draw_point(vert, "blue", 1)
        for point in gen.init_points:
            self.draw_point(point, "red", 1)
        for p in gen.all_water_dps:
            self.draw_point(gen.delaunay.points[p], "blue")
        for line in gen.sorted_mountain_dps:
            points = []
            for d_p in line:
                points.append(gen.delaunay.points[d_p])
            self.draw_multi_line(points, color="red")
        for point in gen.all_mountain_dps:
            self.draw_point(gen.delaunay.points[point], color="purple")
        for p in gen.debug_points:
            self.draw_point(p, "yellow")
        self.draw_multi_line(gen.debug_points, color="yellow")
class MapCanvas:
    """Windowless map renderer: draws with aggdraw into a PIL image
    (cf. the Tk-based MapCanvasBasic above; this variant picks sprites
    randomly from per-type sprite sets)."""
    rand = None
    height = None
    width = None
    # Assets
    parchment = None
    # NOTE(review): mountain/tree/hill are mutable *class* attributes and
    # load_assets() appends to them, so constructing a second MapCanvas
    # re-appends (double-loads) sprites -- confirm single-instance use.
    mountain = []
    tree = []
    hill = []
    image = None
    draw = None
    def __init__(self, height=720, width=1280):
        """Create a width x height renderer; RNG seeded with 0 for reproducible maps."""
        self.height = height
        self.width = width
        self.rand = Random()
        self.rand.seed(0)
        # load_assets() must run first: self.parchment becomes the base image.
        self.load_assets()
        self.image = self.parchment
        self.draw = aggdraw.Draw(self.image)
def load_assets(self):
self.parchment = Image.open("assets/parchment.jpg").convert("RGBA").resize((self.width, self.height),
Image.ANTIALIAS)
for im in os.listdir("assets/mountains/"):
self.mountain.append(
Image.open("assets/mountains/" + im).resize((40, 25), Image.ANTIALIAS))
for im in os.listdir("assets/trees/"):
self.tree.append(Image.open("assets/trees/" + im).resize((5, 8), Image.ANTIALIAS))
for im in os.listdir("assets/hills/"):
self.hill.append(Image.open("assets/hills/" + im).resize((20, 13), Image.ANTIALIAS))
    def show_map(self):
        """Open the rendered PIL image in the system's default image viewer."""
        self.image.show()
def draw_line(self, p1, p2, color='black', width=1.0):
pen = aggdraw.Pen(color, width)
self.draw.line((p1[0], p1[1], p2[0], p2[1]), pen)
def draw_irregular_line(self, p1, p2, splits=3, mag_fact=0.25, color='black', width=1.0):
points = [p1, p2]
for i in range(splits):
new_points = []
for p_idx in range(len(points)):
new_points.append(points[p_idx])
if p_idx + 1 < len(points):
new_points.append(vect.split_line(points[p_idx], points[p_idx + 1], self.rand, mag_fact))
points = new_points
for i in range(len(points) - 1):
self.draw_line(points[i], points[i + 1], color=color, width=width)
def draw_multi_line(self, points, color='black', width=1):
for i in range(len(points) - 1):
self.draw_line(points[i], points[i + 1], color=color, width=width)
def draw_irregular_multi_line(self, points, splits=3, mag_fact=0.25, color='black', width=1):
for i in range(len(points) - 1):
self.draw_irregular_line(points[i], points[i + 1], splits=splits, mag_fact=mag_fact, color=color,
width=width)
def draw_point(self, p, color='black', radius=3):
brush = aggdraw.Brush(color)
self.draw.ellipse((p[0] - radius, p[1] - radius, p[0] + radius, p[1] + radius), None, brush)
def fill_region(self, points, color='blue', opacity=128):
brush = aggdraw.Brush(color, opacity)
pen = aggdraw.Pen(color, width=0, opacity=0)
corrected_points = []
for p in points:
corrected_points.extend([p[0], p[1]])
self.draw.polygon(corrected_points, pen, brush)
def draw_image_at(self, p, image):
self.image.paste(image, (int(round(p[0])) - math.floor(image.width / 2),
int(round(p[1])) - math.floor(image.height / 2),
int(round(p[0])) + math.ceil(image.width / 2),
int(round(p[1])) + math.ceil(image.height / 2)),
mask=image)
def fill_region_with_image(self, points, image, step=5, offset_fact=0.5):
min_p = [self.width, self.height]
max_p = [0, 0]
for point in points:
min_p[0] = min(min_p[0], point[0])
min_p[1] = min(min_p[1], point[1])
max_p[0] = max(max_p[0], point[0])
max_p[1] = max(max_p[1], point[1])
path = Path(points)
fill_points = []
for x in np.arange(min_p[0], max_p[0], step):
for y in np.arange(min_p[1], max_p[1], step):
if path.contains_point([x, y]):
fill_points.append([x, y])
fill_points = sorted(fill_points, key=lambda p: p[1])
for fill_point in fill_points:
offset = [(self.rand.random() - 0.5) * step * offset_fact, (self.rand.random() - 0.5) * step * offset_fact]
self.draw_image_at(vect.add(fill_point, offset), image)
def fill_region_with_image_set(self, points, image_set, step=5, offset_fact=0.5):
min_p = [self.width, self.height]
max_p = [0, 0]
for point in points:
min_p[0] = min(min_p[0], point[0])
min_p[1] = min(min_p[1], point[1])
max_p[0] = max(max_p[0], point[0])
max_p[1] = max(max_p[1], point[1])
path = Path(points)
fill_points = []
for x in np.arange(min_p[0], max_p[0], step):
for y in np.arange(min_p[1], max_p[1], step):
if path.contains_point([x, y]):
fill_points.append([x, y])
fill_points = sorted(fill_points, key=lambda p: p[1])
for fill_point in fill_points:
offset = [(self.rand.random() - 0.5) * step * offset_fact, (self.rand.random() - 0.5) * step * offset_fact]
self.draw_image_at(vect.add(fill_point, offset), self.rand.choice(image_set))
def draw_all_mountains(self, gen, points_per_mountain=1):
    """Draw a mountain sprite at every *points_per_mountain*-th mountain point.

    Delaunay point indices are flattened from gen.sorted_mountain_dps,
    resolved to coordinates, and sorted by y so sprites lower on the map
    are drawn later and paint over the ones above them.
    """
    point_indices = []
    for mountain_line in gen.sorted_mountain_dps:
        point_indices.extend(mountain_line)
    coords = sorted((gen.delaunay.points[d_p] for d_p in point_indices),
                    key=lambda point: point[1])
    # Slice stepping keeps exactly the indices 0, n, 2n, ... that the
    # original modulo filter selected; one rand.choice per drawn sprite.
    for p in coords[::points_per_mountain]:
        self.draw_image_at(p, self.rand.choice(self.mountain))
def draw_map(self, gen, debug=False):
    """Render the full map from generator *gen*.

    Paint order: water polygons and shorelines, rivers, optional debug
    geometry, mountains, hills, forests.  The aggdraw buffer is flushed
    after each raster drawing phase so later PIL pastes land on top.
    """

    def region_polygon(d_p):
        # Vertex coordinates of the Voronoi region owning Delaunay point d_p.
        region = gen.voronoi.regions[gen.voronoi.point_region[d_p]]
        return [gen.voronoi.vertices[vert_idx] for vert_idx in region]

    # Water
    for water_poly in gen.water_polys:
        self.fill_region(water_poly, '#dcdcdc')
    for edge in gen.water_edges:
        for i in range(len(edge) - 1):
            self.draw_line(edge[i], edge[i + 1], "#483320", 2)
    self.draw.flush()
    # Rivers: taper each river from width ~2 at its start toward its end.
    for riv in gen.river_points:
        for i in range(len(riv) - 1):
            thickness = 2 * (len(riv) + 1 - i) / len(riv)
            self.draw_irregular_line(riv[i], riv[i + 1], color="#483320", width=thickness, splits=2)
    self.draw.flush()
    if debug:
        self.draw_debug_geometry(gen)
    self.draw.flush()
    # Mountains
    self.draw_all_mountains(gen, 1)
    # Hills: coarser spacing and smaller jitter than forests.
    for d_p in gen.hill_dps:
        self.fill_region_with_image_set(region_polygon(d_p), self.hill, 12, offset_fact=0.2)
    # Forests
    for d_p in gen.forest_dps:
        self.fill_region_with_image_set(region_polygon(d_p), self.tree)
def draw_debug_geometry(self, gen):
    """Overlay the generator's internal geometry for debugging: Voronoi
    ridges and vertices, initial seed points, water/mountain Delaunay
    points, and any ad-hoc debug points."""
    # Finite Voronoi ridges only (index -1 marks a vertex at infinity).
    for v1_idx, v2_idx in gen.voronoi.ridge_vertices:
        if v1_idx != -1 and v2_idx != -1:
            self.draw_line(gen.voronoi.vertices[v1_idx], gen.voronoi.vertices[v2_idx])
    for vertex in gen.voronoi.vertices:
        self.draw_point(vertex, "blue", 1)
    for seed in gen.init_points:
        self.draw_point(seed, "red", 1)
    for water_dp in gen.all_water_dps:
        self.draw_point(gen.delaunay.points[water_dp], "blue")
    # One red polyline per sorted mountain ridge.
    for line in gen.sorted_mountain_dps:
        self.draw_multi_line([gen.delaunay.points[d_p] for d_p in line], color="red")
    for mountain_dp in gen.all_mountain_dps:
        self.draw_point(gen.delaunay.points[mountain_dp], color="purple")
    for dbg in gen.debug_points:
        self.draw_point(dbg, "yellow")
    self.draw_multi_line(gen.debug_points, color="yellow")
| 37.107914
| 119
| 0.570828
| 2,187
| 15,474
| 3.878372
| 0.077732
| 0.036784
| 0.010611
| 0.019453
| 0.833176
| 0.784249
| 0.765621
| 0.735086
| 0.703018
| 0.690521
| 0
| 0.028921
| 0.298371
| 15,474
| 416
| 120
| 37.197115
| 0.752326
| 0.005622
| 0
| 0.725552
| 0
| 0
| 0.022701
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.091483
| false
| 0
| 0.028391
| 0
| 0.182965
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
481f11dafdf9c09addfe2ae9f2cd02c0875984d8
| 217
|
py
|
Python
|
python/stitch/core/__init__.py
|
theNewFlesh/sparse
|
21e895d2e24cc17e92fe921534059046080cc58b
|
[
"MIT"
] | 2
|
2020-04-17T04:26:23.000Z
|
2021-12-27T17:24:08.000Z
|
python/stitch/core/__init__.py
|
theNewFlesh/stitch
|
21e895d2e24cc17e92fe921534059046080cc58b
|
[
"MIT"
] | null | null | null |
python/stitch/core/__init__.py
|
theNewFlesh/stitch
|
21e895d2e24cc17e92fe921534059046080cc58b
|
[
"MIT"
] | null | null | null |
import stitch.core.utils
import stitch.core.errors
import stitch.core.stitch_frame
import stitch.core.stitch_lut
import stitch.core.stitch_parser
import stitch.core.stitch_interpreter
import stitch.core.stitch_string
| 27.125
| 37
| 0.870968
| 33
| 217
| 5.575758
| 0.30303
| 0.456522
| 0.608696
| 0.597826
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.064516
| 217
| 7
| 38
| 31
| 0.906404
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
4868ab7d3e4ed77b085f26f4ed51d3fab23a6ebd
| 3,630
|
py
|
Python
|
src/oci/__init__.py
|
xjuarez/oci-python-sdk
|
3c1604e4e212008fb6718e2f68cdb5ef71fd5793
|
[
"Apache-2.0",
"BSD-3-Clause"
] | 3
|
2020-09-10T22:09:45.000Z
|
2021-12-24T17:00:07.000Z
|
src/oci/__init__.py
|
xjuarez/oci-python-sdk
|
3c1604e4e212008fb6718e2f68cdb5ef71fd5793
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
src/oci/__init__.py
|
xjuarez/oci-python-sdk
|
3c1604e4e212008fb6718e2f68cdb5ef71fd5793
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
# coding: utf-8
# Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.

# Package initializer for the OCI Python SDK: imports the core plumbing
# (client, request/response, signing, waiting) and, unless disabled via
# environment variable, every service client subpackage.
from . import auth, config, constants, decorators, exceptions, regions, pagination, retry, fips
from .base_client import BaseClient
from .request import Request
from .response import Response
from .signer import Signer
from .version import __version__ # noqa
from .waiter import wait_until
import os

# FIPS mode is switched on unconditionally at import time.
fips.enable_fips_mode()

# Service subpackage imports are slow; allow callers to opt out by setting
# OCI_PYTHON_SDK_NO_SERVICE_IMPORTS to "true" or "1" (case-insensitive).
if os.getenv("OCI_PYTHON_SDK_NO_SERVICE_IMPORTS", "").lower() in ["true", "1"]:
    pass
else:
    from . import ai_anomaly_detection, ai_language, analytics, announcements_service, apigateway, apm_config, apm_control_plane, apm_synthetics, apm_traces, application_migration, appmgmt_control, artifacts, audit, autoscaling, bastion, bds, blockchain, budget, certificates, certificates_management, cims, cloud_guard, compute_instance_agent, container_engine, core, data_catalog, data_flow, data_integration, data_labeling_service, data_labeling_service_dataplane, data_safe, data_science, database, database_management, database_migration, database_tools, devops, dns, dts, email, events, file_storage, functions, generic_artifacts_content, golden_gate, healthchecks, identity, identity_data_plane, integration, jms, key_management, limits, load_balancer, log_analytics, logging, loggingingestion, loggingsearch, management_agent, management_dashboard, marketplace, monitoring, mysql, network_load_balancer, nosql, object_storage, oce, ocvp, oda, ons, operator_access_control, opsi, optimizer, os_management, osp_gateway, resource_manager, resource_search, rover, sch, secrets, service_catalog, service_manager_proxy, streaming, tenant_manager_control_plane, usage, usage_api, vault, vulnerability_scanning, waas, waf, work_requests

# NOTE(review): __all__ includes "Error" and "circuit_breaker", which are not
# imported in this chunk — presumably bound elsewhere in the package; confirm.
__all__ = [
    "BaseClient", "Error", "Request", "Response", "Signer", "config", "constants", "decorators", "exceptions", "regions", "wait_until", "pagination", "auth", "retry", "fips", "circuit_breaker",
    "ai_anomaly_detection", "ai_language", "analytics", "announcements_service", "apigateway", "apm_config", "apm_control_plane", "apm_synthetics", "apm_traces", "application_migration", "appmgmt_control", "artifacts", "audit", "autoscaling", "bastion", "bds", "blockchain", "budget", "certificates", "certificates_management", "cims", "cloud_guard", "compute_instance_agent", "container_engine", "core", "data_catalog", "data_flow", "data_integration", "data_labeling_service", "data_labeling_service_dataplane", "data_safe", "data_science", "database", "database_management", "database_migration", "database_tools", "devops", "dns", "dts", "email", "events", "file_storage", "functions", "generic_artifacts_content", "golden_gate", "healthchecks", "identity", "identity_data_plane", "integration", "jms", "key_management", "limits", "load_balancer", "log_analytics", "logging", "loggingingestion", "loggingsearch", "management_agent", "management_dashboard", "marketplace", "monitoring", "mysql", "network_load_balancer", "nosql", "object_storage", "oce", "ocvp", "oda", "ons", "operator_access_control", "opsi", "optimizer", "os_management", "osp_gateway", "resource_manager", "resource_search", "rover", "sch", "secrets", "service_catalog", "service_manager_proxy", "streaming", "tenant_manager_control_plane", "usage", "usage_api", "vault", "vulnerability_scanning", "waas", "waf", "work_requests"
]
| 145.2
| 1,406
| 0.774105
| 436
| 3,630
| 6.135321
| 0.428899
| 0.017944
| 0.028411
| 0.007477
| 0.765607
| 0.734206
| 0.734206
| 0.734206
| 0.734206
| 0.734206
| 0
| 0.004857
| 0.092562
| 3,630
| 24
| 1,407
| 151.25
| 0.807225
| 0.093664
| 0
| 0
| 0
| 0
| 0.366514
| 0.094977
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.058824
| 0.588235
| 0
| 0.588235
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 10
|
4871447144529c302e097a99c22c5dcf316390eb
| 3,776
|
py
|
Python
|
tests/test_spectrum.py
|
fossabot/matchms
|
090db31b22829dc224e5ed78fb4901e8c14a8e92
|
[
"Apache-2.0"
] | null | null | null |
tests/test_spectrum.py
|
fossabot/matchms
|
090db31b22829dc224e5ed78fb4901e8c14a8e92
|
[
"Apache-2.0"
] | null | null | null |
tests/test_spectrum.py
|
fossabot/matchms
|
090db31b22829dc224e5ed78fb4901e8c14a8e92
|
[
"Apache-2.0"
] | null | null | null |
import numpy
from matchms import Spectrum
from matplotlib import pyplot as plt
def _build_spectrum():
    """Create the small fixed spectrum shared by all the plot tests."""
    mz = numpy.array([10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 110], dtype="float")
    intensities = numpy.array([1, 1, 5, 5, 5, 5, 7, 7, 7, 9, 9], dtype="float")
    return Spectrum(mz=mz, intensities=intensities)


def _assert_figure_ok(fig, expected_n_axes):
    """Structural checks on the figure returned by Spectrum.plot().

    The first axes must hold one Line2D per peak (11 for the fixture);
    *expected_n_axes* is 2 when a histogram panel was requested, else 1.
    """
    assert fig is not None
    assert hasattr(fig, "axes")
    assert isinstance(fig.axes, list)
    assert len(fig.axes) == expected_n_axes
    assert isinstance(fig.axes[0], plt.Axes)
    assert hasattr(fig.axes[0], "lines")
    assert isinstance(fig.axes[0].lines, list)
    assert len(fig.axes[0].lines) == 11
    assert isinstance(fig.axes[0].lines[0], plt.Line2D)
    assert hasattr(fig.axes[0].lines[0], "_x")


def test_spectrum_plot_with_histogram_unspecified():
    fig = _build_spectrum().plot()
    _assert_figure_ok(fig, expected_n_axes=1)


def test_spectrum_plot_with_histogram_false():
    fig = _build_spectrum().plot(with_histogram=False)
    _assert_figure_ok(fig, expected_n_axes=1)


def test_spectrum_plot_with_histogram_true():
    fig = _build_spectrum().plot(with_histogram=True)
    _assert_figure_ok(fig, expected_n_axes=2)


def test_spectrum_plot_with_histogram_true_and_intensity_limit():
    fig = _build_spectrum().plot(with_histogram=True, intensity_to=10.0)
    _assert_figure_ok(fig, expected_n_axes=2)


def test_spectrum_plot_with_histogram_true_and_expfit_true_and_intensity_limit():
    fig = _build_spectrum().plot(with_histogram=True, with_expfit=True, intensity_to=10.0)
    _assert_figure_ok(fig, expected_n_axes=2)
| 36.307692
| 83
| 0.661811
| 599
| 3,776
| 4.095159
| 0.096828
| 0.128414
| 0.097839
| 0.132491
| 0.96168
| 0.957603
| 0.944558
| 0.944558
| 0.944558
| 0.944558
| 0
| 0.077717
| 0.178761
| 3,776
| 103
| 84
| 36.660194
| 0.713318
| 0
| 0
| 0.833333
| 0
| 0
| 0.027807
| 0
| 0
| 0
| 0
| 0
| 0.641026
| 1
| 0.064103
| false
| 0
| 0.038462
| 0
| 0.102564
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6f8476cf01f6b32fef0a0870b75efa6d9fbd5a16
| 12,158
|
py
|
Python
|
demo/SAAVpedia/scripts/SAAVannotator.py
|
saavpedia/python
|
071f745dbe5dfa7a655cb08aa99886541b030daa
|
[
"Apache-2.0"
] | null | null | null |
demo/SAAVpedia/scripts/SAAVannotator.py
|
saavpedia/python
|
071f745dbe5dfa7a655cb08aa99886541b030daa
|
[
"Apache-2.0"
] | null | null | null |
demo/SAAVpedia/scripts/SAAVannotator.py
|
saavpedia/python
|
071f745dbe5dfa7a655cb08aa99886541b030daa
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
################################################################################
# Copyright 2018 Young-Mook Kang <ymkang@thylove.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
import argparse, sys, os
import time
from datetime import datetime
from SAAVpedia import SAAVpedia
def main(theArgs):
    """Annotate an SCF input file via SAAVpedia and write the result file.

    The output path is theArgs.output when given; otherwise a timestamped
    .scf file is written under ./result (created on demand).  Prints simple
    progress/timing messages to stdout.
    """
    theStartTime = time.time()
    theSAAVpedia = SAAVpedia()
    # Reading input file
    print('Reading the input file...')
    theSAAVpedia.openSCF(theArgs.input)
    if theArgs.output:
        theOutputName = theArgs.output
    else:
        theResultFolder = os.getcwd() + os.sep + 'result'
        if not os.path.exists(theResultFolder):
            os.mkdir(theResultFolder)
        elif not os.path.isdir(theResultFolder):
            # A non-directory entry named "result" is in the way: fall back
            # to a unique timestamped folder name.  (Bug fix: the original
            # did `theResultFolder += theResultFolder + ...`, which appended
            # the whole path to itself, producing a doubled path.)
            theResultFolder += datetime.now().strftime('-%Y-%m-%d-%Hh-%Mm-%S.%fs')
            os.mkdir(theResultFolder)
        theOutputName = theResultFolder + os.sep + datetime.now().strftime('SAAVannotator-%Y-%m-%d-%Hh-%Mm-%S.%fs.scf')
    print('Fetching output data...')
    theRESTBegin = time.time()
    theSAAVpedia.applyFilter(theArgs._get_kwargs())
    theSCFData = theSAAVpedia.data()
    theRESTEnd = time.time()
    print('Estimated time for fetching data: {0:.3f}s'.format(theRESTEnd - theRESTBegin))
    print('Writing {0} file...'.format(theOutputName))
    # "with" guarantees the output file is closed even if a write fails, and
    # open() replaces the Python-2-only file() builtin.
    with open(theOutputName, 'w') as theWriter:
        theWriter.write(theSAAVpedia.getMetaInfo())
        theWriter.write('#' + '\t'.join(theSAAVpedia.header()) + '\n')
        for ithData in theSCFData:
            theWriter.write('\t'.join(ithData) + '\n')
    theEndTime = time.time()
    print('Total estimated time: {0:.3f}s'.format(theEndTime - theStartTime))
if __name__ == '__main__':
    # Command-line interface: --input/--output paths plus one boolean flag
    # per annotation column the user wants included in the output SCF.
    theParser = argparse.ArgumentParser(description='SAAVpedia: SAAVannotator program')
    theParser.add_argument('--input', dest='input', help='SCF input file path')
    theParser.add_argument('--output', dest='output', help='Functional Annotation with SCF output file path')
    theParser.add_argument('--snv_1000g_oc', action='store_true', help='Occurring(OC) in the 1000 Genomes variant list.')
    theParser.add_argument('--snv_1000g_t_maf', action='store_true', help='Minor allele frequency(MAF) in the 1000Genome population.')
    theParser.add_argument('--snv_1000g_eas_maf', action='store_true', help='Minor allele frequency(MAF) in the 1000Genome East Asian population.')
    theParser.add_argument('--snv_1000g_amr_maf', action='store_true', help='Minor allele frequency(MAF) in the 1000Genome American population.')
    theParser.add_argument('--snv_1000g_eur_maf', action='store_true', help='Minor allele frequency(MAF) in the 1000Genome European population.')
    theParser.add_argument('--snv_1000g_afr_maf', action='store_true', help='Minor allele frequency(MAF) in the 1000Genome African population.')
    theParser.add_argument('--snv_1000g_sas_maf', action='store_true', help='Minor allele frequency(MAF) in the 1000Genome South Asian population.')
    theParser.add_argument('--snv_vt', action='store_true', help='Variant Type(VT)')
    theParser.add_argument('--snv_esp_oc', action='store_true', help='Occurring(OC) in the Exome Sequencing Project(ESP) variant list.')
    theParser.add_argument('--snv_esp_af_maf', action='store_true', help='Minor allele frequency(MAF) in the ESP African(AF) population.')
    theParser.add_argument('--snv_esp_eu_maf', action='store_true', help='Minor allele frequency(MAF) in the ESP European(EU) population.')
    theParser.add_argument('--snv_exac_oc', action='store_true', help='Occurring(OC) in the Exome Aggregation Consortium(ExAC) variant list.')
    theParser.add_argument('--snv_phenotype', action='store_true', help='Phenotype term.')
    theParser.add_argument('--snv_source', action='store_true', help='Reference database of SNV_Phenotype.')
    theParser.add_argument('--snv_oid', action='store_true', help='EFO ontology ID(OID) of SNV_Phenotype.')
    theParser.add_argument('--snv_phe_cls', action='store_true', help='EFO phenotype class(CLS) name of SNV_Phenotype.')
    theParser.add_argument('--snv_db', action='store_true', help='Drug bank(DB) ID.')
    theParser.add_argument('--snv_dn', action='store_true', help='Drug name(DN)')
    theParser.add_argument('--snv_dt', action='store_true', help='Drug type(DT)')
    theParser.add_argument('--snv_pgt', action='store_true', help='Pharmacological gene type')
    theParser.add_argument('--ptm', action='store_true', help='Post-Translational Modification')
    theParser.add_argument('--efo', action='store_true', help='EFO ID')
    theParser.add_argument('--ptm-filter', dest='PTM', help='Filter by Post-Translational Modification')
    theParser.add_argument('--efo-filter', dest='EFO_ID', help='Filter by EFO ID')
    theParser.add_argument('--protein_uniplot', action='store_true', help='Uniplot identifier')
    theParser.add_argument('--protein_nextprot', action='store_true', help='NextProt identifier')
    theParser.add_argument('--protein_pdb', action='store_true', help='Protein Data Bank(PDB) identifier')
    theParser.add_argument('--protein_ensembl_pro', action='store_true', help='Ensembl protein(Pro) identifier')
    theParser.add_argument('--transcript_ensembl_tra', action='store_true', help='Ensembl transcript(Tra) identifier')
    theParser.add_argument('--gene_ensembl_gen', action='store_true', help='Ensembl Gene(Gen) identifier')
    theParser.add_argument('--gene_gf', action='store_true', help='Gene Families(GF)')
    theParser.add_argument('--gene_gd', action='store_true', help='Gene description(GD)')
    theParser.add_argument('--gene_gs', action='store_true', help='Gene Symbol(GS)')
    theParser.add_argument('--gene_hgnc', action='store_true', help='HGNC ID')
    theParser.add_argument('--gene_ucsc', action='store_true', help='UCSC ID')
    theParser.add_argument('--gene_cosmic', action='store_true', help='Cosmic Gene ID')
    theParser.add_argument('--gene_entrez', action='store_true', help='Entrez ID')
    theParser.add_argument('--gene_refseq', action='store_true', help='Reference Sequence (RefSeq) accession number.')
    theParser.add_argument('--disease_omim', action='store_true', help='Omim ID')
    theParser.add_argument('--drug_pharmgkb', action='store_true', help='PharmGKB ID')
    theParser.add_argument('--drug_chembl', action='store_true', help='CHEMBL ID')
    theParser.add_argument('--literature_pmid', action='store_true', help='Pubmed ID')
    theParser.add_argument('--biological_function_string', action='store_true', help='STRING ID')
    theParser.add_argument('--biological_function_vega', action='store_true', help='Vega ID')
    theParser.add_argument('--biological_function_ena', action='store_true', help='European Nucleotide Archive ID')
    # (A dead, upper-cased duplicate of the option table that was kept as a
    # no-op triple-quoted string literal has been removed; recover it from
    # version control history if ever needed.)
    theArgs = theParser.parse_args(sys.argv[1:])
    # --input is mandatory; without it just show usage instead of running.
    if not theArgs.input:
        theParser.print_help()
    else:
        main(theArgs)
| 71.940828
| 148
| 0.724215
| 1,587
| 12,158
| 5.340895
| 0.160681
| 0.127419
| 0.212364
| 0.19278
| 0.781147
| 0.773714
| 0.773714
| 0.765219
| 0.765219
| 0.765219
| 0
| 0.011695
| 0.113835
| 12,158
| 168
| 149
| 72.369048
| 0.775014
| 0.054038
| 0
| 0.066667
| 0
| 0
| 0.419178
| 0.028054
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.022222
| 0.044444
| null | null | 0.066667
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6f95efa34c15be2aaf8551aff626ca0948248008
| 201
|
py
|
Python
|
model/__init__.py
|
rajun1971/SpeechRecognitionWar
|
f3f1311d32aca795082884c40db1862b994f76c4
|
[
"MIT"
] | 1
|
2020-05-27T05:23:23.000Z
|
2020-05-27T05:23:23.000Z
|
model/__init__.py
|
rajun1971/SpeechRecognitionWar
|
f3f1311d32aca795082884c40db1862b994f76c4
|
[
"MIT"
] | 3
|
2019-09-15T02:03:53.000Z
|
2021-04-30T20:55:33.000Z
|
model/__init__.py
|
rajun1971/SpeechRecognitionWar
|
f3f1311d32aca795082884c40db1862b994f76c4
|
[
"MIT"
] | null | null | null |
# Model package exports.
from .session import SessionCollector
# NOTE(review): the four imports below all bind the same name "Transcoder",
# so each one shadows the previous and only recaiusspeechapi.Transcoder is
# visible after this module loads.  Presumably unintentional — confirm
# whether per-service aliases (e.g. GoogleTranscoder) were intended before
# renaming, since renaming would change this package's public API.
from .googlespeechapi import Transcoder
from .microsoftspeechapi import Transcoder
from .watsonspeechapi import Transcoder
from .recaiusspeechapi import Transcoder
| 40.2
| 42
| 0.880597
| 20
| 201
| 8.85
| 0.45
| 0.361582
| 0.338983
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.094527
| 201
| 5
| 43
| 40.2
| 0.972527
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
6f9894bb54f91913fae377421a514f5049899a64
| 44,691
|
py
|
Python
|
microsetta_private_api/LEGACY/locale_data/spanish_gut.py
|
gwarmstrong/microsetta-private-api
|
1ceb6444e02c368c6ecfffc1c7e70b35e4fdac64
|
[
"BSD-3-Clause"
] | null | null | null |
microsetta_private_api/LEGACY/locale_data/spanish_gut.py
|
gwarmstrong/microsetta-private-api
|
1ceb6444e02c368c6ecfffc1c7e70b35e4fdac64
|
[
"BSD-3-Clause"
] | null | null | null |
microsetta_private_api/LEGACY/locale_data/spanish_gut.py
|
gwarmstrong/microsetta-private-api
|
1ceb6444e02c368c6ecfffc1c7e70b35e4fdac64
|
[
"BSD-3-Clause"
] | null | null | null |
media_locale = {}
_NEW_PARTICIPANT = {
'ADD_HUMAN_TITLE': 'Agregar una nueva fuente humana',
'SEL_AGE_RANGE': 'Seleccione el rango de edad del participante:',
'AGE_0_6': '3 meses - 6 años',
'AGE_7_12': '7-12 años',
'AGE_13_17': '13-17 años',
'AGE_18': '18+ años',
# TODO: DO NOT MERGE WITHOUT THESE
'PARTICIPATION_AGREEMENT': 'PENDING',
'EXHIBIT_A': 'PENDING',
'BILL_OF_RIGHTS': '''Declaración de derechos del sujeto experimental''',
'TEXT_I_HAVE_READ_1': 'He leído (o alguien me ha leído) este formulario. Soy consciente de que se me pide que participe en un estudio de investigación. Acepto voluntariamente participar en este estudio.',
'TEXT_I_HAVE_READ_SIMPLIFIED': 'Sí, estarás en este estudio de investigación.',
'PERSON_ATTAINING_ASSENT': 'Firma de la persona que obtiene el consentimiento',
'TEXT_ASSENT_WITNESS': 'A mi juicio, el participante está dando su consentimiento voluntaria y conscientemente y posee la capacidad legal de dar su consentimiento para participar en el estudio.',
'OBTAINER_NAME': 'Nombre de la persona que obtiene el consentimiento',
'TEXT_I_HAVE_READ_PARENT': 'He leído (o alguien me ha leído) este formulario. Soy consciente de que se le está pidiendo a mi hijo que participe en un estudio de investigación. Acepto voluntariamente que mi hijo participe en este estudio',
'PARTICIPANT_NAME': 'Nombre del participante',
'PARTICIPANT_EMAIL': 'Correo electrónico del participante',
'PARTICIPANT_PARENT_1': 'Nombre del Padre de Familia / Guardian',
'PARTICIPANT_PARENT_2': 'Nombre del padre / tutor del segundo padre',
'PARTICIPANT_DECEASED_PARENTS': 'Uno de los padres / tutor ha fallecido o no puede dar su consentimiento',
'DATE_SIGNED': 'Fecha firmada',
'PARTICIPANT_ADDRESS': 'Dirección del participante',
'PARTICIPANT_CITY': 'Ciudad participante',
'PARTICIPANT_STATE': 'Estado participante',
'PARTICIPANT_ZIPCODE': 'Código postal del participante',
'PARTICIPANT_COUNTRY': 'País participante',
'CONSENT_YOUR_CHILD': '''
<p align='center'><b>University of California, San Diego</b><br/>
Consentimiento de los padres para que su hijo participe como sujeto en una investigación<br/></p>
<p align='center' style='font-weight: bold;'>Proyecto American Gut</p>
<p style='font-weight: bold;'><i>¿Quién realiza el estudio, por qué se le ha pedido a su hijo que participe, cómo se ha seleccionado a su hijo y cuál es la cifra aproximada de participantes en el estudio?</i></p>
<p>El Dr. Rob Knight está realizando un estudio para saber más sobre los billones de bacterias y otros organismos (denominados “microbioma”) que viven dentro de su cuerpo y en la superficie de este. Usted ha inscrito voluntariamente a su hijo en este estudio porque quiere saber más sobre su microbioma. Los niños, como los demás seres humanos, tienen un microbioma único. Incluirlos en el estudio ayudará a definir el desarrollo del microbioma. En el estudio participarán unas 100 000 personas de todo EE. UU. y de otros países.</p>
<p style='font-weight: bold;'><i>¿Por qué se está llevando a cabo este estudio?</i></p>
<p>El objetivo del estudio es evaluar con mayor precisión las diferencias entre las personas y determinar si dichas diferencias pueden atribuirse al modo de vida, a la dieta, al tipo de constitución corporal, a la edad o a la presencia de enfermedades relacionadas. Los resultados se utilizarán para crear una base de datos que contendrá secuencias y metabolitos de distintas zonas del cuerpo (p. ej., la piel, la boca, el estómago y los intestinos), y detalles del niño participante que provee la muestra. Otros investigadores podrán usar estos datos cuando estén estudiando el mismo tipo de muestra en otros estudios científicos (p. ej., ciertas enfermedades en las que sean comnes las anomalías del estómago y los intestinos). </p>
<p style='font-weight: bold;'>¿Qué le sucederá a su hijo durante el estudio, y qué procedimientos forman parte de la atención estándar y cuáles son experimentales?</p>
<p>Se le pregunta si desea que su hijo participe en este estudio porque usted se registró en el sitio web de American Gut para un análisis microbiológico. Cuando se registró, le enviamos un kit de toma de muestras junto con las instrucciones de inicio de sesión en el sitio web, de modo que pueda dar su consentimiento formal para el estudio.</p>
<p>Analizaremos todas las muestras para las que se hayan completado el formulario de consentimiento y el cuestionario. Se analizarán las muestras suministradas para el proyecto (incluidas las de su hijo) y los resultados se publicarán en artículos científicos en los que se definirá la diversidad del microbioma humano. Se le enviará un enlace a través del que podrá ver, descargar e imprimir diagramas en alta resolución —aptos para enmarcar— de sus resultados y acceder a un informe detallado sobre los taxones encontrados.</p>
<p>A través de este consentimiento, queremos que comprenda lo que haremos con la muestra de su hijo y lo que usted obtendrá a cambio.</p>
<p>Le pediremos que responda un cuestionario en línea sobre el modo de vida y los hábitos alimentarios de su hijo. Calculamos que no deberá llevarle más de 30 minutos. A continuación, tomará una muestra de una parte del cuerpo de su hijo (de interés para usted) con algo semejante a un bastoncillo de algodón estéril frotándolo por la piel o la lengua. También puede tomar una muestra de las heces de una de las siguientes tres formas:</p>
<ol>
<li>Inserte las puntas del bastoncillo en papel higiénico usado y devuelva la muestra en la funda de plástico suministrada.</li>
<li>Inserte las puntas del bastoncillo en papel higiénico usado, pase las puntas por la superficie de una tarjeta para pruebas de sangre oculta en las heces y devuélvanos la tarjeta. La tarjeta para pruebas de sangre oculta en las heces es el mismo instrumento que utiliza el médico para verificar si hay sangre en las heces. La tarjeta para pruebas de sangre oculta en las heces permite estabilizar las heces para su posterior análisis. No verificaremos si hay sangre en las heces con fines diagnósticos, puesto que no somos un laboratorio clínico.</li>
<li>Envíe una muestra completa de heces refrigeradas con hielo (cuando se solicite, para realizar otros análisis) en el recipiente para envíos que le suministraremos. Dicho recipiente contiene una serie de compresas de hielo que enfriarán la muestra de manera fiable a -20 °C/-4 °F.</li>
</ol>
<p>También puede tomar muestras de otras partes del cuerpo, como el oído, la nariz, la vagina, el cuero cabelludo o la planta del pie.</p>
<p>Debe enviarnos los bastoncillos o las tarjetas para pruebas de sangre oculta en las heces en el sobre suministrado y a través del servicio regular de correo nacional. Las muestras de heces completas deben enviarse a los laboratorios Knight Lab en el recipiente para envíos suministrado.</p>
<p>Se extraerá el ADN de la muestra, que se amplificará mediante PCR (reacción en cadena de la polimerasa) y se secuenciará para ver qué bacterias contiene la muestra y en qué proporción. Es posible que utilicemos el resto de la muestra para analizar los metabolitos, como las proteínas, o las sustancias químicas derivadas de las bacterias o para analizar en mayor detalle bacterias específicas que nos resulten de interés. Creemos que tardaremos unos dos meses en conocer los resultados del cálculo inicial de la diversidad bacteriana.</p>
<p>Los resultados se cargarán en su cuenta de American Gut en cuanto estén disponibles.</p>
<p>Le pedimos también que nos autorice a utilizar la muestra de su hijo o el ADN no humano extraído de dicha muestra (p. ej., el ADN de las bacterias) en futuros estudios. Además, queremos solicitarle su consentimiento para ponernos en contacto con usted por correo electrónico para solicitar más información como seguimiento a las preguntas sobre su hijo que haya respondido en el cuestionario.</p>
<p style='font-weight: bold;'>Atención: La secuenciación no tiene fines diagnósticos y no está dirigida al ADN humano.>
<p style='font-weight: bold;'>¿Cuánto tiempo es necesario para realizar cada procedimiento del estudio, cuánto tiempo debe dedicar en total su hijo y cuánto durará el estudio?</p>
<p>Tardará 30 minutos o menos en responder el cuestionario en línea. Tardará 5 minutos o menos en tomar cada una de las muestras que vaya a suministrar. En principio, el estudio durará 5 años. No obstante, los resultados estarán a su disposición antes de que este termine (por lo general, en un plazo de 2 meses después de que recibamos la muestra). Puede optar por tomar muestras de su hijo en más de una ocasión. Si los datos personales de su hijo (p. ej., su dirección o estado de salud) cambian, le pedimos que vuelva a introducir voluntariamente los datos de contacto y/o las respuestas del cuestionario.</p>
<p style='font-weight: bold;'>¿Cuáles son los riesgos relacionados con este estudio?</p>
<p>Las técnicas de toma de muestras se usan desde hace más o menos 10 años y no se han notificado efectos secundarios al respecto. No examinamos el ADN humano que haya en la muestra, por lo que no obtendremos información personal sobre el genoma. Antes de consignar datos en archivos de acceso público se eliminan todos los datos conocidos de la secuencia humana. El personal del estudio ha tomado precauciones para garantizar que el riesgo de pérdida de la confidencialidad sea mínimo. Si se pusiera en riesgo la confidencialidad, las consecuencias para su hijo serían mínimas, pues los resultados no son diagnósticos y no repercuten en la decisión de las aseguradoras de darle o no cobertura a su hijo.</p>
<p><i>(Por lo general, las leyes federales y estatales consideran que es ilegal que las aseguradoras, las aseguradoras para grupos y la mayor parte de los empleadores le discriminen por su información genética. Por norma general, dichas leyes lo protegen de las siguientes formas: a) las aseguradoras y las aseguradoras para grupos no pueden solicitarnos la información genética que obtengamos sobre usted en esta investigación; b) las aseguradoras y las aseguradoras para grupos no pueden usar su información genética para tomar decisiones sobre sus opciones de cobertura o pagos por seguro; c) los empleadores que tengan cinco empleados o más no pueden usar la información genética que obtengamos sobre usted en esta investigación para tomar decisiones de contratación, ascenso o despido ni para definir condiciones laborales.)</i></p>
<p>Dado que este es un estudio de investigación, puede haber riesgos desconocidos que no podemos prever. Le informaremos de cualquier novedad importante al respecto.</p>
<p style='font-weight: bold;'>¿Cuáles son las alternativas a no participar en este estudio?</p>
<p>El estudio es completamente voluntario, y si no permite que su hijo participe no tendrá que hacer frente a ninguna consecuencia. No hay pruebas alternativas.</p>
<p style='font-weight: bold;'>¿Cuáles beneficios podrían anticiparse razonablemente?</p>
<p>Su hijo no obtendrá ningún beneficio directo por participar en este estudio. De cualquier modo, el investigador puede obtener más información sobre el microbioma humano en situaciones de buena salud y de enfermedad, lo que podría resultar valioso para los investigadores de otros estudios.</p>
<p style='font-weight: bold;'>¿Puede decidir que su hijo no participe o se retire del estudio sin recibir sanciones ni perder beneficios?</p>
<p>Ni usted ni su hijo obtendrán ningún beneficio directo por participar en este estudio. Creemos que tal vez sienta curiosidad natural por saber qué bacterias contiene su muestra cómo es en comparación con las muestras de otras personas del mismo sexo y edad. De cualquier modo, el investigador obtendrá más información sobre el microbioma humano en situaciones de buena salud y de enfermedad, lo que podría resultar valioso para los investigadores de otros estudios. Tal vez pueda deducir de sus impuestos la contribución al proyecto. Fundrazr, que se encarga de todas las contribuciones financieras, le enviará un recibo.</p>
<p>Analizaremos todas las muestras para las que se hayan completado el formulario de consentimiento y el cuestionario. Se analizarán las muestras del proyecto (incluidas las de su hijo) y los resultados se publicarán en un artículo científico. Se le enviará un enlace a través del que podrá ver, descargar e imprimir diagramas en alta resolución de sus resultados, aptos para enmarcar, y acceder a un informe detallado sobre los taxones encontrados.</p>
<p>Ni usted ni su médico pueden utilizar los resultados del análisis para confirmar diagnósticos clínicos; no realizamos pruebas de detección de enfermedades infecciosas.</p>
<p style='font-weight: bold;'>¿Se puede retirar a su hijo del estudio sin su consentimiento?</p>
<p>La participación en el estudio es completamente voluntaria. Puede negarse a que su hijo participe o hacer que su hijo se retire del estudio en cualquier momento, sin tener que recibir sanciones ni perder los beneficios que le correspondan. Si decide que no desea que su hijo siga participando en el estudio, se le pedirá que se comunique a la línea de asistencia del proyecto American Gut para comunicarnos su intención de que el niño abandone el estudio. Si no se ha procesado la muestra, puede solicitar un reembolso, que se tramitará en el sitio mediante el que realizó su contribución al proyecto.</p>
<p>Se le notificará cualquier información importante obtenida durante el transcurso del estudio y que pueda afectar a su voluntad de que su hijo siga participando.</p>
<p style='font-weight: bold;'>¿Se le pagará por participar en este estudio?</p>
<p>No recibirá ninguna remuneración económica por participar en este estudio.</p>
<p style='font-weight: bold;'>¿Hay algún gasto vinculado con la participación en el estudio?</p>
<p>Una vez que se haya inscrito en el proyecto y haya enviado por correo su muestra, no tendrá que pagar nada por participar en el estudio.</p>
<p style='font-weight: bold;'>¿Qué sucede si su hijo sufre una lesión como consecuencia directa del estudio?</p>
<p>Si su hijo sufre una lesión como consecuencia directa de su participación en este estudio, la Universidad de California le prestará toda la atención médica necesaria para su tratamiento. Esta universidad no ofrecerá ningún otro tipo de indemnización a su hijo por lesiones. Para saber más sobre este tema, usted o su hijo pueden llamar a la oficina del Programa de protección para investigaciones con seres humanos al 858-246-4777 para consultar sobre sus derechos como sujeto de investigación o denunciar problemas relacionados con el estudio.</p>
<p style='font-weight: bold;'>¿Y la confidencialidad?</p>
<p>Se protegerá la confidencialidad de los registros de la investigación en la medida que lo permita la ley. Todos los datos sobre su hijo que se introduzcan en el sitio web se guardarán en un servidor protegido por contraseña ubicado en el SDSC (San Diego Supercomputer Center), que se encuentra en la UCSD y cuenta con acceso controlado por características biométricas. Los datos financieros de los participantes que contribuyan al proyecto no se pondrán a disposición de los investigadores. El código (que vincula los datos personales del participante con los códigos de barras de las muestras) se guarda en otro servidor protegido por contraseña, al que solo pueden acceder el investigador principal, el investigador secundario, el coordinador de las muestras y los programadores de la base de datos. Todos los análisis se realizan con datos anonimizados; los datos que se consignan en archivos públicos para uso de otros investigadores también se anonimizan. La Junta de Revisión Institucional de la UCSD podrá revisar los registros del estudio.</p>
<p>Tendrá que presentar datos a través de los que se le podría identificar si se publicaran, como su nombre, su edad, su fecha de nacimiento y su dirección. Hemos hecho todo lo posible para garantizar que nadie pueda identificarle a partir de los datos personales que suministre, pero conservamos información fundamental, como su sexo y su edad, sin poner en riesgo sus datos personales ni la integridad de estos.</p>
<p>Es posible que debamos denunciar incidentes confirmados o sospechas razonables de maltrato o abandono de niños, adultos dependientes o personas mayores, incluidos el maltrato físico o emocional, el abuso sexual o económico y el abandono. El único modo en que podríamos descubrir dicho maltrato es que lo denuncie el propio participante o su tutor legal, por lo que no es probable que suceda. Si los investigadores se percatan de este tipo de información, podrán denunciarla ante las autoridades pertinentes.</p>
<p style='font-weight: bold;'>¿A quién puede llamar si tiene alguna duda?</p>
<p>Si tiene alguna duda o problemas relacionados con la investigación, puede llamar a Rob Knight al 858-246-1184 o a Dominic Nguyen al 858-534-8739.</p>
<p>Puede llamar a la oficina del Programa de protección para investigaciones con seres humanos al 858-246-4777 para consultar acerca de sus derechos como sujeto de investigación o denunciar cualquier problema relacionado con el estudio.</p>
<p style='font-weight: bold;'>Firma y consentimiento</p>
<p>Ha recibido una copia de este documento de consentimiento y una copia de la “Declaración de derechos del sujeto experimental”, para que las conserve.</p>
<p>Autoriza a su hijo a participar.</p>''',
'CONSENT_18': '''
<p align='center'><b>University of California, San Diego</b><br/>
Consentimiento para participar como sujeto de investigación</p>
<p style='font-weight: bold;' align='center'>Proyecto American Gut</p>
<p style='font-weight: bold;'><i>¿Quién realiza el estudio, por qué se le ha pedido que participe, cómo fue seleccionado y cuál es la cifra aproximada de participantes en el estudio?</i></p>
<p>El Dr. Rob Knight está realizando un estudio de investigación para saber más sobre los billones de bacterias y otros organismos (denominados “microbioma”) que viven dentro de su cuerpo y en su superficie. Le hemos pedido que participe en este estudio porque posee un microbioma único, distinto al del resto del mundo. En el estudio participarán unas 100 000 personas de todos los EE. UU. y otros países.</p>
<p style='font-weight: bold;'><i>¿Por qué se está llevando a cabo este estudio?</i></p>
<p>El objetivo del estudio es evaluar con mayor precisión las diferencias entre las personas y determinar si dichas diferencias pueden atribuirse al modo de vida, a la dieta, al tipo de constitución corporal, a la edad o a la presencia de enfermedades relacionadas. Los resultados se utilizarán para crear una base de datos que contendrá secuencias y metabolitos de distintas zonas del cuerpo (p. ej., la piel, la boca o el estómago), así como detalles de los participantes que provean las muestras que otros investigadores podrán usar cuando necesiten muestras para compararlas al estudiar el mismo tipo de muestra en otros estudios científicos, p. ej., ciertas enfermedades en las que sean comunes las anomalías del estómago y los intestinos.</p>
<p style='font-weight: bold;'><i>¿Qué le sucederá durante el estudio?</i></p>
<p>Se le ha pedido que participe en este estudio porque se registró en el sitio web de American Gut (estudio sobre el estómago y los intestinos de los estadounidenses) para un análisis microbiológico. Cuando se registró, le enviamos un kit de toma de muestras junto con las instrucciones de inicio de sesión en el sitio web, de modo que pueda dar su consentimiento formal para participar en el estudio.</p>
<p>Analizaremos todas las muestras para las que se hayan completado el formulario de consentimiento y el cuestionario. Se analizarán las muestras suministradas para el proyecto (incluidas las suyas) y los resultados se publicarán en artículos científicos en los que se definirá la diversidad del microbioma humano. Se le enviará un enlace a través del que podrá ver, descargar e imprimir diagramas en alta resolución de sus resultados —aptos para enmarcar— y acceder a un informe más detallado sobre los taxones encontrados. A través de este consentimiento queremos que comprenda lo que haremos con la muestra y lo que usted obtendrá a cambio.</p>
<p>Le pediremos que complete un cuestionario en línea sobre su modo de vida y sus hábitos alimentarios. Calculamos que no deberá llevarle más de 30 minutos. A continuación, le pediremos que tome una muestra de una parte de su cuerpo (de interés para usted) con algo semejante a un bastoncillo de algodón estéril frotándolo por la piel o la lengua. También puede tomar una muestra de sus heces de una de las siguientes tres formas:</p>
<ol>
<li>Inserte las puntas del bastoncillo en papel higiénico usado y devuelva la muestra en la funda de plástico suministrada.</li>
<li>Inserte las puntas del bastoncillo en papel higiénico usado, pase las puntas por la superficie de una tarjeta para pruebas de sangre oculta en las heces y devuélvanos la tarjeta. La tarjeta para pruebas de sangre oculta en las heces es el instrumento que utiliza el médico para verificar si hay sangre en las heces. La tarjeta para pruebas de sangre oculta en las heces permite estabilizar las heces para su posterior análisis. No verificaremos si hay sangre en las heces con fines diagnósticos, puesto que no somos un laboratorio clínico.</li>
<li>Envíe una muestra completa de heces refrigeradas con hielo (cuando se le solicite, para realizar otros análisis) en el recipiente para envíos que le suministraremos. Dicho recipiente contiene una serie de compresas de hielo que enfriarán la muestra de manera fiable a -20 °C/-4 °F.</li>
</ol>
<p>También puede tomar muestras de otras partes del cuerpo, como el oído, la nariz, la vagina, el cuero cabelludo o la planta del pie.</p>
<p>Debe enviarnos los bastoncillos o las tarjetas para pruebas de sangre oculta en las heces en el sobre suministrado a través del servicio regular de correo nacional. Las muestras de heces completas deben enviarse al laboratorio Knight Lab en el recipiente para envíos suministrado. </p>
<p>Se extraerá el ADN de la muestra, y este se amplificará mediante PCR (reacción en cadena de la polimerasa) y se secuenciará para ver qué bacterias contiene la muestra y en qué proporción. Es posible que utilicemos el resto de la muestra para analizar los metabolitos, como las proteínas, o las sustancias químicas derivadas de las bacterias o para tratar de detectar bacterias específicas que nos resulten de interés. Creemos que pasarán dos meses para que usted conozca los resultados del cálculo inicial de la diversidad bacteriana.</p>
<p>Los resultados se cargarán en su cuenta de American Gut en cuanto estén disponibles.</p>
<p>Queremos también que nos autorice a utilizar la muestra suministrada o el ADN no humano extraído de dicha muestra (p. ej., el ADN de las bacterias) en futuros estudios. Además, queremos solicitarle su consentimiento para ponernos en contacto con usted por correo electrónico a fin de pedirle más información como seguimiento a las preguntas que haya respondido en el cuestionario.</p>
<p><b><u>Atención: La secuenciación no tiene fines diagnósticos y no está dirigida al ADN humano.</u></b></p>
<p style='font-weight: bold;'><i>¿Cuánto tiempo es necesario para realizar cada procedimiento del estudio, cuánto tiempo debe dedicar en total y cuánto durará el estudio?</i></p>
<p>Tardará 30 minutos o menos para responder el cuestionario en línea. Tardará 5 minutos o menos en tomar cada una de las muestras que vaya a suministrar. Anticipamos que el estudio dure cinco años. No obstante, los resultados estarán a su disposición antes de que el estudio termine (por lo general, en un plazo de 2 meses después de que recibamos la muestra). Puede optar por tomar muestras en más de una ocasión. Si sus datos personales (p. ej., su dirección o estado de salud) cambian, le pedimos que vuelva a introducir voluntariamente sus datos de contacto y/o las respuestas del cuestionario.</p>
<p style='font-weight: bold;'><i>¿Cuáles son los riesgos relacionados con este estudio?</i></p>
<p>Las técnicas de toma de muestras se usan desde hace más o menos 10 años, y no se han notificado efectos secundarios al respecto. No examinamos el ADN humano que haya en la muestra, por lo que no obtendremos información personal sobre su genoma. Antes de consignar datos en archivos de acceso público se eliminan todos los datos conocidos de la secuencia humana. El personal del estudio ha tomado precauciones para garantizar que el riesgo de pérdida de la confidencialidad sea mínimo. Si se pusiera en riesgo la confidencialidad, las consecuencias para usted serían mínimas, pues los resultados no son diagnósticos y no repercuten en la decisión de las aseguradoras de darle o no cobertura. <i>(Por lo general, las leyes federales y estatales consideran que es ilegal que las aseguradoras, las aseguradoras para grupos y la mayor parte de los empleadores le discriminen por su información genética. Por norma general, dichas leyes lo protegen de las siguientes formas: a) las aseguradoras y las aseguradoras para grupos no pueden solicitarnos la información genética que obtengamos sobre usted en esta investigación; b) las aseguradoras y las aseguradoras para grupos no pueden usar su información genética para tomar decisiones sobre sus opciones de cobertura o pagos por seguro; c) los empleadores que tengan cinco empleados o más no pueden usar la información genética que obtengamos sobre usted en esta investigación para tomar decisiones de contratación, ascenso o despido ni para definir condiciones laborales.)</i></p>
<p>Dado que este es un estudio de investigación, puede haber riesgos desconocidos que no podemos prever. Le informaremos de cualquier novedad importante al respecto.</p>
<p style='font-weight: bold;'><i>¿Cuáles son las alternativas a no participar en este estudio?</i></p>
<p>El estudio es completamente voluntario y si no participa, no tendrá que hacer frente a ninguna consecuencia. No hay pruebas alternativas.</p>
<p style='font-weight: bold;'><i>¿Cuáles podrían ser los beneficios de participar?</i></p>
<p>No obtendrá ningún beneficio directo por participar en este estudio. Creemos que tal vez sienta curiosidad natural por saber qué microbios contiene su muestra y cómo es en comparación con las muestras de otras personas del mismo sexo y edad. De cualquier modo, el investigador obtendrá más información sobre el microbioma humano en situaciones de buena salud y de enfermedad, lo que podría resultar valioso para los investigadores de otros estudios. Tal vez pueda deducir de sus impuestos la contribución al proyecto. Fundrazr, que se encarga de todas las contribuciones financieras, le enviará un recibo.</p>
<p>Analizaremos todas las muestras para las que se hayan completado el formulario de consentimiento y el cuestionario. Se analizarán las muestras suministradas (incluidas las suyas) y los resultados se publicarán en un artículo científico. Se le enviará un enlace a través del que podrá ver, descargar e imprimir diagramas en alta resolución de sus resultados, aptos para enmarcar. Ni usted ni su médico pueden utilizar los resultados del análisis para confirmar diagnósticos clínicos; no realizamos pruebas de detección de enfermedades infecciosas.</p>
<p style='font-weight: bold;'><i>¿Puede decidir no participar o retirarse del estudio sin recibir sanciones ni perder beneficios?</i></p>
<p>La participación en el estudio es completamente voluntaria. Puede negarse a participar o retirarse del estudio en cualquier momento, sin tener que recibir sanciones ni perder los beneficios que le correspondan. Si decide que no desea seguir participando en el estudio, se le pedirá que envíe un mensaje a la dirección de correo electrónico de asistencia del proyecto American Gut para comunicarnos su intención de retirarse del estudio. Si no se ha procesado la muestra, puede solicitar un reembolso, que se tramitará en el sitio en el que realizó su contribución al proyecto.</p>
<p>Se le notificará cualquier información importante obtenida durante el transcurso del estudio y que pueda afectar a su deseo de seguir participando.</p>
<p style='font-weight: bold;'><i>¿Se le puede retirar del estudio sin que usted dé su consentimiento?</i></p>
<p>Es posible que se le retire del estudio si no completa el consentimiento. También se le podría retirar si no sigue las instrucciones que le dé el personal del estudio.</p>
<p style='font-weight: bold;'><i>¿Se le pagará por participar en este estudio?</i></p>
<p>No recibirá ninguna remuneración económica por participar en este estudio.</p>
<p style='font-weight: bold;'><i>¿Hay algún gasto vinculado con la participación en el estudio?</i></p>
<p>Una vez que se haya inscrito en el proyecto y haya enviado por correo su muestra, no tendrá que pagar nada por participar en el estudio.</p>
<p style='font-weight: bold;'><i>¿Qué sucede si sufre una lesión como consecuencia directa del estudio?</i></p>
<p>Si sufre una lesión como consecuencia directa de su participación en este estudio, la Universidad de California le prestará toda la atención médica necesaria para el tratamiento de esas lesiones. Esta universidad no le ofrecerá ningún otro tipo de indemnización por lesiones. Para saber más sobre este tema, llame a la oficina del Programa de protección para investigaciones con seres humanos al (858) 246-4777 para consultar acerca de sus derechos como sujeto de investigación o denunciar cualquier problema relacionado con el estudio.</p>
<p style='font-weight: bold;'><i>¿Y su confidencialidad?</i></p>
<p>Se protegerá la confidencialidad de los registros de la investigación en la medida que lo permita la ley. Todos los datos sobre usted que se introduzcan en el sitio web se guardarán en un servidor protegido por contraseña ubicado en el SDSC (San Diego Supercomputer Center), que se encuentra en la UCSD y cuenta con acceso controlado por características biométricas. Los datos financieros de los participantes que contribuyan al proyecto no se pondrán a disposición de los investigadores. El código (que vincula los datos personales del participante con los códigos de barras de las muestras) se guarda en otro servidor protegido por contraseña, al que solo pueden acceder el investigador principal, el investigador secundario, el coordinador de las muestras, el administrador de TI y los programadores de la base de datos. Todos los análisis se realizan con datos anonimizados; los datos que se consignan en archivos públicos para el uso de otros investigadores también se anonimizan. La Junta de Revisión Institucional de la UCSD podrá revisar los registros del estudio. Tendrá que suministrar datos a través de los que se le podría identificar si se publicaran, como su nombre, su edad, su fecha de nacimiento y su dirección. Hemos hecho todo lo posible para garantizar que nadie pueda identificarle a partir de los datos personales que suministre, pero conservamos información fundamental, como su sexo y su edad, sin poner en riesgo sus datos personales ni la integridad de estos.</p>
<p>Es posible que debamos denunciar incidentes confirmados o sospechas razonables de maltrato o abandono de niños, adultos dependientes o personas mayores, incluidos el maltrato físico o emocional, el abuso sexual o económico y el abandono. El único modo en que podríamos descubrir dicho maltrato es que lo denuncie el propio participante o su tutor legal, por lo que no es probable que suceda. Si los investigadores se percatan de este tipo de información, podrán denunciarla ante las autoridades pertinentes.</p>
<p style='font-weight: bold;'><i>¿A quién puede llamar si tiene alguna duda?</i></p>
<p> Si tiene alguna duda o problemas relacionados con la investigación, puede llamar a Rob Knight al 858-246-1184 o a Dominic Nguyen al 858-534-8739.</p>
<p>Puede llamar a la oficina del Programa de protección para investigaciones con seres humanos al 858-246-4777 para consultar acerca de sus derechos como sujeto de investigación o denunciar cualquier problema relacionado con el estudio.</p>
<p style='font-weight: bold;'><i>Firma y consentimiento</i></p>
<p>Ha recibido una copia de este documento de consentimiento y una copia de la “Declaración de derechos del sujeto experimental” para que las conserve.</p>
<p>Acepta participar.</p>''',
'ASSENT_13_17': '''
<p align='center'>
<b>University of California, San Diego</b><br/>
Asentimiento para participar como sujeto de investigación<br/>
(de 13 a 17 años)
</p>
<p align='center'>
<b>Proyecto American Gut</b>
</p>
<p style='font-weight: bold;'><i>¿Quién realiza el estudio, por qué se te ha pedido que participes, cómo se te ha seleccionado y cuál es la cifra aproximada de participantes en el estudio?</i></p>
<p>El Profesor Rob Knight está llevando a cabo un estudio para saber más sobre el microbioma (microorganismos [seres vivos diminutos, como bacterias] inofensivos o inocuos que viven dentro de tu cuerpo y en la superficie de este). Te hemos invitado a participar en este estudio porque como el resto de los habitantes de este planeta, tú tienes un microbioma único, y mientras más personas de todas las edades estudiemos, mejor podremos comprender cómo nos ayudan o nos perjudican los microorganismos. En el estudio participarán unas 100 000 personas.</p>
<p style='font-weight: bold;'><i>¿Por qué se realiza este estudio?</i></p>
<p>El objetivo del estudio es evaluar con mayor precisión las diferencias entre las personas y determinar si dichas diferencias pueden atribuirse al modo de vida, a la dieta, al tipo constitucional, a la edad o a la presencia de enfermedades relacionadas. Los resultados se utilizarán para crear una base de datos que contendrá secuencias y metabolitos de distintas zonas del cuerpo (p. ej., la piel, la boca o el estómago y los intestinos), y detalles acerca de ti (el participante que provee la muestra) que otros investigadores podrán usar cuando necesiten muestras para compararlas con lo que ellos estén estudiando, p. ej., ciertas enfermedades en las que las anomalías estomacales son comunes.</p>
<p style='font-weight: bold;'><i>¿Qué te sucederá durante estudio y qué procedimientos son de referencia y cuáles son experimentales?</i></p>
<p>Cuando hayas dado tu consentimiento para participar en el estudio, tendrás que completar un cuestionario en línea en el que responderás a preguntas sobre tu cuerpo, edad, peso, estatura y hábitos alimentarios, así como si has tomado antibióticos, si tienes determinadas enfermedades o si tomas suplementos, como vitaminas. A continuación, tendrás que tomar una muestra de tu cuerpo con el bastoncillo que se suministra con el kit del proyecto American Gut (sobre el estómago y los intestinos de los estadounidenses). En el sobre encontrarás unas instrucciones que te indicarán qué debes hacer. La muestra más común es de material fecal (heces). Para tomarla, inserta las puntas del bastoncillo en papel higiénico usado o pasa las puntas por la superficie de la tarjeta (la tarjeta para pruebas de sangre oculta en las heces). También puedes tomar la muestra de cualquier zona de la piel, de la lengua o la boca, de las fosas nasales, de la cera de los oídos o de la vagina. Puede que se te pida que deposites materia fecal en un recipiente de plástico, que debes colocar debajo de la taza del inodoro. Ninguna de estas muestras o investigaciones nos permitirán diagnosticar enfermedades. Tampoco examinaremos nada en tu propio ADN que también pueda encontrarse en la materia fecal, o en la piel o la saliva. Puede que deseemos ponernos en contacto contigo para hacerte más preguntas acerca de la información que tú o tu mamá/papá/tutor hayan suministrado en el cuestionario.</p>
<p style='font-weight: bold;'><i>¿Cuánto tiempo es necesario para realizar los procedimientos del estudio, cuánto tiempo debes dedicar en total y cuánto durará el estudio?</i></p>
<p>Tardarás 30 minutos o menos para responder el cuestionario en línea. Tardarás 5 minutos o menos en tomar cada una de las muestras que vayas a suministrar. En principio, el estudio durará 5 años. No obstante, los resultados estarán a tu disposición antes de que este termine (por lo general, en un plazo de 2 meses después de que recibamos la muestra). Puedes optar por tomar muestras en más de una ocasión. Si tus datos personales (p. ej., tu dirección o estado de salud) cambian, te pedimos que vuelvas a introducir voluntariamente tus datos de contacto o las respuestas del cuestionario para que podamos hacer un seguimiento contigo.</p>
<p style='font-weight: bold;'><i>¿Cuáles son los riesgos relacionados con este estudio?</i></p>
<p>Las técnicas de toma de muestras se usan desde hace unos 5 años y no se han notificado efectos secundarios al respecto. No examinaremos tu información genética personal y la información que divulguemos en archivos de datos públicos no contendrá información genética humana, puesto que dicha información se habrá eliminado previamente. El personal del estudio ha tomado precauciones para garantizar que el riesgo de filtración de tus datos personales sea mínimo. Si se publicaran tus datos, las consecuencias serían mínimas, puesto que las pruebas no pueden usarse para elaborar diagnósticos. $(20)s</p>
<p style='font-weight: bold;'><i>¿Cuáles son las alternativas a no participar en este estudio?</i></p>
<p>No estás obligado a participar. No tendrás que hacer frente a ninguna consecuencia.</p>
<p style='font-weight: bold;'><i>¿Cuáles beneficios podrían anticiparse razonablemente?</i></p>
<p>No obtendrás ningún beneficio directo por participar en este estudio. El investigador publicará un artículo científico para describir lo que encuentre en las muestras del estudio, incluida la tuya, pero no de forma tal que se pueda saber que habla de tu muestra. Los resultados serán muy valiosos para los investigadores de otros estudios. Se te enviará un enlace a través del cual podrás acceder a tus datos. De este modo, tanto tú como tus padres sabran qué hay en tu muestra en comparación con las muestras de otras personas similares a ti (del mismo sexo y edad).</p>
<p style='font-weight: bold;'><i>¿Puedes decidir no participar o retirarte del estudio sin recibir sanciones ni perder beneficios?</i></p>
<p>La participación en el estudio es completamente voluntaria. Puedes negarte a participar o retirarte del estudio en cualquier momento, sin tener que recibir sanciones ni perder los beneficios que te correspondan. Si decides que no deseas seguir participando en el estudio, se te pedirá que envíes un mensaje a la dirección de correo electrónico del proyecto American Gut para comunicarnos tu intención de abandonar el estudio. Se te notificará cualquier información importante obtenida durante el transcurso del estudio y que pueda afectar a tu voluntad para seguir participando.</p>
<p style='font-weight: bold;'><i>¿Se te puede retirar del estudio sin tu consentimiento?</i></p>
<p>Es posible que se te retire del estudio si no completas el formulario de consentimiento. También se te podría retirar si no sigues las instrucciones que te suministre el personal del estudio.</p>
<p style='font-weight: bold;'><i>¿Se te pagará por participar en este estudio?</i></p>
<p>No recibirás ninguna remuneración económica por participar en este estudio.</p>
<p style='font-weight: bold;'><i>¿Qué sucede si te lesionas como consecuencia directa del estudio?</i></p>
<p>Si sufres una lesión o te enfermas como consecuencia directa de este estudio, se te prestará la atención médica que necesites.</p>
<p style='font-weight: bold;'><i>¿Y la confidencialidad?</i></p>
<p>Se protegerá la confidencialidad de los registros de la investigación en la medida que lo permita la ley. Todos los datos que se introduzcan en el sitio web se guardarán en un servidor protegido por contraseña ubicado en el SDSC (San Diego Supercomputer Center), que se encuentra en la UCSD y cuenta con acceso controlado por características biométricas. El código (que indica qué código barras se colocó en tu muestra) se guarda en otro servidor protegido por contraseña, al que solo pueden acceder el prof. Knight, el coordinador de las muestras y los programadores de la base de datos. Los datos que se usan en los análisis no indican quién eres. Guardaremos tus datos en un lugar al que podrán acceder otros investigadores, pero no podrán determinar de ningún modo quién eres a partir de dichos datos; no publicaremos, por ejemplo, tu dirección. La Junta de Revisión Institucional de la UCSD podrá revisar los registros del estudio.</p>
<p style='font-weight: bold;'><i>¿A quién puedes llamar si tienes alguna duda?</i></p>
<p>Si tienes alguna duda o problemas relacionados con la investigación, puedes llamar a Rob Knight al 858-246-1184 o a Dominic Nguyen al 858-534-8739.</p>
<p>Puedes llamar a la oficina del Programa de protección para investigaciones con seres humanos al 858-246-4777 para consultar sobre tus derechos como sujeto de investigación o denunciar problemas relacionados con el estudio.</p>
<p style='font-weight: bold;'><i>Firma y asentimiento</i></p>
<p>Has recibido una copia de esta hoja de asentimiento y una copia de la “Declaración de derechos del sujeto experimental”, que deberás guardar.</p>
<p>Aceptas participar.</p>''',
'ASSENT_7_12': '''
<p align='center'>
<b>University of California, San Diego</b><br/>
Asentimiento para participar como sujeto de investigación<br/>
(de 7 a 12 años)
</p>
<p align='center'>
<b>Proyecto Gut</b>
</p>
<p>El Profesor Rob Knight y su equipo de investigación están realizando un estudio para saber más sobre los billones de diminutos seres vivos, como las bacterias, que viven dentro de tu cuerpo o en la superficie de este. Te preguntan si deseas participar en este estudio porque eres diferente (en buen sentido) de todos los demás, del mismo modo que el resto de las personas son diferentes entre sí.</p>
<p>Si decides participar en este estudio, esto es lo que sucederá:</p>
<p>Les pediremos a ti, a tu mamá o a tu papá que tomen una muestra de alguna parte de tu cuerpo (como la piel o la boca) o de tu materia fecal (tomándola del papel higiénico) con un objeto parecido a dos bastoncillos de algodón. A veces necesitamos más materia fecal, por lo que quizá te pidamos que evacúes en un tazón de plástico que se coloca debajo de la silla del inodoro y recibe la materia fecal. Tu mamá o tu papá nos enviará la materia fecal en el tazón.</p>
<p>A veces los niños no se sienten bien mientras participan en este estudio. Podrías sentir un poco de dolor en el lugar de la piel que se frota con el bastoncillo de algodón. La mayor parte de las personas no sienten este dolor.</p>
<p>Si sientes algo así o cualquier otra cosa, no dejes de decírselo a tu mamá o a tu papá.</p>
<p>No es necesario que participes en este estudio de investigación si no quieres. Nadie se molestará contigo si dices que no. Incluso si ahora dices que sí y cambias de opinión después de empezar a participar, podrás retirarte del estudio y nadie se molestará.</p>
<p>No dejes de hablar con tus padres si tienes alguna duda. Si no te explican bien lo que quieras saber, puedes llamar a Dominic Nguyen para pedirle más información. Su número de teléfono es el 858-534-8739. También hay un sitio web en Internet en que se explican las cosas: .<a href = "https://www.americangut.org">https://www.americangut.org</a></p>''',
}
| 224.577889
| 1,536
| 0.770088
| 7,186
| 44,691
| 4.788338
| 0.143473
| 0.007556
| 0.020489
| 0.025953
| 0.782121
| 0.760731
| 0.737685
| 0.725014
| 0.699962
| 0.67308
| 0
| 0.00573
| 0.179947
| 44,691
| 198
| 1,537
| 225.712121
| 0.931813
| 0.000716
| 0
| 0.216495
| 0
| 0.623711
| 0.990393
| 0.009181
| 0
| 0
| 0
| 0.005051
| 0
| 1
| 0
| false
| 0
| 0.025773
| 0
| 0.025773
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6fdbf90f3b2994b4372443757209567db770711b
| 2,003
|
py
|
Python
|
simplepy/percent.py
|
Kevys/python-games
|
4949c122c9ccc69166afd106bedf8d618fe0f12f
|
[
"MIT"
] | 1
|
2021-12-01T13:38:28.000Z
|
2021-12-01T13:38:28.000Z
|
simplepy/percent.py
|
Kevys/python-games
|
4949c122c9ccc69166afd106bedf8d618fe0f12f
|
[
"MIT"
] | null | null | null |
simplepy/percent.py
|
Kevys/python-games
|
4949c122c9ccc69166afd106bedf8d618fe0f12f
|
[
"MIT"
] | null | null | null |
from .convert import convert
class percent():
    """Percentage helpers.

    The class is used as a namespace of stateless utilities; every method
    accepts ``string=True`` to return the result as ``str`` and
    ``round_off=True`` to round to the nearest integer.  Values are first
    normalized through the project-local ``convert(value, to=int)``.
    """

    @staticmethod
    def _finish(x, string: bool, round_off: bool):
        # Shared post-processing used by every public method: optional
        # rounding first, then optional str() conversion (order matters so
        # the string shows the rounded value).
        if round_off:
            x = round(x)
        if string:
            x = str(x)
        return x

    @staticmethod
    def _strip_percent_sign(amount):
        # Accept "25%" as well as 25.  Guard with an isinstance-style check:
        # running `"%" in amount` on an int raises TypeError (this was the
        # bug in the original lower(); higher() already guarded).
        if type(amount) is str and "%" in amount:
            amount = amount.replace("%", "")
        return amount

    @staticmethod
    def in_percent(
        number: int,
        string: bool = False,
        round_off: bool = False
    ):
        """Return ``number`` expressed as a fraction, i.e. ``number / 100``."""
        number = convert(number, to=int)
        return percent._finish(number / 100, string, round_off)

    @staticmethod
    def used(
        used: int,
        total: int,
        string: bool = False,
        round_off: bool = False
    ):
        """Return the percentage of ``total`` that ``used`` represents."""
        used = convert(used, to=int)
        total = convert(total, to=int)
        return percent._finish((used / total) * 100, string, round_off)

    @staticmethod
    def left(
        left: int,
        total: int,
        string: bool = False,
        round_off: bool = False
    ):
        """Return the percentage of ``total`` that ``left`` represents.

        Bug fix: the original computed the ratio from the undefined name
        ``used`` (a NameError on every call); it now uses ``left`` as
        intended.
        """
        left = convert(left, to=int)
        total = convert(total, to=int)
        return percent._finish((left / total) * 100, string, round_off)

    @staticmethod
    def lower(
        number: int,
        amount: int,
        string: bool = False,
        round_off: bool = False
    ):
        """Return ``amount`` percent of ``number`` (e.g. 20% of 50 -> 10).

        Bug fix: the original ran ``"%" in amount`` unguarded, raising
        TypeError for non-string amounts; now consistent with higher().
        """
        amount = percent._strip_percent_sign(amount)
        number = convert(number, to=int)
        amount = convert(amount, to=int)
        return percent._finish(number * (amount / 100), string, round_off)

    @staticmethod
    def higher(
        number: int,
        amount: int,
        string: bool = False,
        round_off: bool = False
    ):
        """Return ``number`` increased by ``amount`` percent."""
        amount = percent._strip_percent_sign(amount)
        number = convert(number, to=int)
        amount = convert(amount, to=int)
        return percent._finish(number * ((amount / 100) + 1), string, round_off)
| 21.771739
| 48
| 0.458313
| 237
| 2,003
| 3.827004
| 0.130802
| 0.099228
| 0.077178
| 0.099228
| 0.85226
| 0.825799
| 0.825799
| 0.825799
| 0.787211
| 0.787211
| 0
| 0.014273
| 0.440339
| 2,003
| 92
| 49
| 21.771739
| 0.794826
| 0
| 0
| 0.8
| 0
| 0
| 0.001996
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.066667
| false
| 0
| 0.013333
| 0
| 0.16
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d26a4a947fd1538a8bde114a1fb524a2b3ca269a
| 150
|
py
|
Python
|
tests/python/py3implicitnamespace/namespace/sibling/__init__.py
|
sammorley-short/sphinx-autoapi
|
48ec2b341eabe2489ddd71fa553707c04129ef98
|
[
"MIT"
] | 197
|
2019-06-29T07:59:40.000Z
|
2022-03-13T14:10:54.000Z
|
tests/python/py3implicitnamespace/namespace/sibling/__init__.py
|
sammorley-short/sphinx-autoapi
|
48ec2b341eabe2489ddd71fa553707c04129ef98
|
[
"MIT"
] | 158
|
2019-07-04T09:47:12.000Z
|
2022-03-30T06:12:34.000Z
|
tests/python/py3implicitnamespace/namespace/sibling/__init__.py
|
sammorley-short/sphinx-autoapi
|
48ec2b341eabe2489ddd71fa553707c04129ef98
|
[
"MIT"
] | 91
|
2019-07-02T17:52:32.000Z
|
2022-03-29T12:34:11.000Z
|
def first_method():
    """First sibling package method."""
    value = 1
    return value
def second_method():
    """Second sibling package method."""
    value = 2
    return value
| 16.666667
| 40
| 0.64
| 18
| 150
| 5.222222
| 0.5
| 0.297872
| 0.425532
| 0.553191
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017241
| 0.226667
| 150
| 8
| 41
| 18.75
| 0.793103
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
d279288ab730ce511643ca0579cda872da2295d4
| 8,551
|
py
|
Python
|
tests/test_utility.py
|
TriPed-Robot/trip_kinematics
|
1c20aaa40ce892bc3ec23d605925add4039f4744
|
[
"MIT"
] | 13
|
2021-11-30T16:49:16.000Z
|
2022-03-24T12:35:17.000Z
|
tests/test_utility.py
|
TriPed-Robot/trip_kinematics
|
1c20aaa40ce892bc3ec23d605925add4039f4744
|
[
"MIT"
] | 8
|
2021-11-18T16:38:48.000Z
|
2022-03-24T15:46:02.000Z
|
tests/test_utility.py
|
TriPed-Robot/TriP
|
1c20aaa40ce892bc3ec23d605925add4039f4744
|
[
"MIT"
] | 4
|
2021-12-13T14:49:33.000Z
|
2022-03-15T14:39:28.000Z
|
import unittest
import numpy as np
from trip_kinematics.Utility import Rotation as R
class TestStates(unittest.TestCase):
    """Correct results were generated using scipy.spatial.transform.Rotation.

    Each test holds a table of reference (input, expected quaternion) pairs
    and checks the project's Rotation implementation against them with
    np.allclose.  Quaternions are in the project's (w, x, y, z) ordering as
    produced by ``as_quat()`` — presumably scalar-first; TODO confirm against
    trip_kinematics.Utility.
    """

    def test_from_euler_to_quat(self):
        """Check from_euler('xyz', ...) against scipy-generated quaternions,
        both in radians (degrees=False) and in degrees (degrees=True)."""
        # (euler_angles_xyz_radians, expected_quaternion) reference pairs.
        from_euler_cases = [
            ([1, 2, 3],
             [0.4359528440735657, -0.7182870182434115, 0.3106224510657039, 0.44443511344300074]),
            ([1, 0, 0], [0.8775825618903728, 0.479425538604203, 0.0, 0.0]),
            ([0, 1, 0], [0.8775825618903728, 0.0, 0.479425538604203, 0.0]),
            ([0, 0, 1], [0.8775825618903728, 0.0, 0.0, 0.479425538604203]),
            ([1.9259795237086745, -1.3166224746837234, -1.6487569080546618],
             [0.6754185988029391, 0.1844402562591348, -0.7139352011035228, 0.00938279741930681]),
            ([1.7518638481261277, -1.4762648402511551, 2.783050177056892],
             [-0.4241491835815902, 0.5252649613496548, 0.482281714511987, 0.5582101201991901]),
            ([-1.5637740430088356, 0.9967772877753394, -1.281072618629502],
             [0.7010098084209453, -0.2935149227036473, 0.6418274036266723, -0.1024295982699634]),
            ([-2.018987322821037, -2.477643741973166, -1.7329756283622468],
             [-0.4975785988500945, -0.562138163026927, -0.1155880687282515, -0.6504272611159234]),
            ([-1.547364165870453, 3.1155873968096914, 2.5081666410682404],
             [-0.6610669854921286, -0.6825367059399726, 0.2141370877143812, 0.22644953831298628]),
            ([0.9232800471655072, 2.9636239629991614, 1.3121379453450235],
             [0.3336790888032567, -0.5126269396587794, 0.7307896657891122, -0.3030154299822701]),
            ([-3.0969110304794603, 1.6540648344396605, -1.1444726410810802],
             [0.4111294068459875, -0.5601567984771901, 0.38036785991321215, 0.6103419230982696]),
        ]
        for euler_angles, quat in from_euler_cases:
            # Radians path.
            assert np.allclose(R.from_euler('xyz', euler_angles, degrees=False).as_quat(), quat)
            # Same angles converted to degrees must yield the same quaternion.
            euler_angles_deg = np.array(euler_angles) * (180 / np.pi)
            assert np.allclose(R.from_euler('xyz', euler_angles_deg, degrees=True).as_quat(), quat)

    def test_from_matrix_to_quat(self):
        """Check from_matrix(...) against scipy-generated quaternions.

        Note: some expected quaternions here are the negation of the ones in
        test_from_euler_to_quat — q and -q represent the same rotation.
        """
        # (rotation_matrix, expected_quaternion) reference pairs.
        test_cases = [
            (np.array([[0.41198224566568303, -0.8337376517741568, -0.3676304629248995],
                       [-0.058726644927620864, -0.4269176212762076, 0.902381585483331],
                       [-0.9092974268256819, -0.35017548837401474, -0.2248450953661529]]),
             [-0.43595284407356566, 0.7182870182434113, -0.31062245106570385, -0.4444351134430007]),
            (np.array([[1., 0., 0.],
                       [0., 0.54030231, -0.84147098],
                       [0., 0.84147098, 0.54030231]]),
             [0.8775825618903726, 0.47942553860420295, 0.0, 0.0]),
            (np.array([[0.54030231, 0., 0.84147098],
                       [0., 1., 0.],
                       [-0.84147098, 0., 0.54030231]]),
             [0.8775825618903726, 0.0, 0.47942553860420295, 0.0]),
            (np.array([[0.54030231, -0.84147098, 0.],
                       [0.84147098, 0.54030231, 0.],
                       [0., 0., 1.]]),
             [0.8775825618903726, 0.0, 0.0, 0.47942553860420295]),
            (np.array([[-0.01958302, -0.27603141, -0.9609491],
                       [-0.25068215, 0.93178751, -0.26254618],
                       [0.96787136, 0.23575134, -0.08744336]]),
             [-0.6754185988029392, -0.18444025625913482, 0.7139352011035228, -0.00938279741930680]),
            (np.array([[-0.08838838, 0.98018011, 0.17729764],
                       [0.03312264, -0.17500364, 0.98401048],
                       [0.99553523, 0.09284766, -0.01699786]]),
             [-0.4241491835815902, 0.5252649613496548, 0.4822817145119869, 0.5582101201991901]),
            (np.array([[0.15513152, -0.23316354, 0.95998384],
                       [-0.52038015, 0.80671434, 0.28002943],
                       [-0.83972538, -0.54299793, 0.00381315]]),
             [0.7010098084209453, -0.2935149227036473, 0.6418274036266723, -0.10242959826996342]),
            (np.array([[0.12716755, -0.51732444, 0.84628827],
                       [0.7772303, -0.47810987, -0.40905258],
                       [0.61623167, 0.7097791, 0.34128017]]),
             [0.49757859885009453, 0.5621381630269271, 0.11558806872825153, 0.6504272611159234]),
            (np.array([[0.80573183, 0.00708378, -0.59223816],
                       [-0.59170947, -0.0342715, -0.80542248],
                       [-0.02600233, 0.99938745, -0.02342209]]),
             [0.6610669854921286, 0.6825367059399726, -0.2141370877143812, -0.22644953831298634]),
            (np.array([[-0.25174377, -0.54702511, 0.7983662],
                       [-0.95146476, 0.29079054, -0.10077531],
                       [-0.17703071, -0.78498687, -0.59367983]]),
             [0.3336790888032567, -0.5126269396587794, 0.7307896657891123, -0.30301542998227005]),
            (np.array([[-0.03439394, -0.92799031, -0.37101353],
                       [0.07572774, -0.3725858, 0.92490277],
                       [-0.99653518, 0.00371504, 0.0830893]]),
             [0.4111294068459875, -0.5601567984771901, 0.38036785991321215, 0.6103419230982696])
        ]
        for matrix, quat in test_cases:
            assert np.allclose(R.from_matrix(matrix).as_quat(), quat)
def main():
    """Run the from_matrix round-trip checks outside the unittest runner.

    The original function duplicated the entire reference-data table from
    ``TestStates.test_from_matrix_to_quat`` verbatim; it now delegates to
    that method so the test data is maintained in exactly one place.
    (The only divergence in the duplicated copy was an extra trailing digit
    on one float literal, well inside np.allclose tolerance.)
    """
    # unittest.TestCase instances are constructed with the name of the
    # method to run; calling the method directly executes the same asserts.
    TestStates('test_from_matrix_to_quat').test_from_matrix_to_quat()
# Allow running this module directly (outside the pytest/unittest runner).
if __name__ == '__main__':
    main()
| 58.972414
| 101
| 0.565782
| 861
| 8,551
| 5.57259
| 0.228804
| 0.023343
| 0.017507
| 0.010838
| 0.772822
| 0.749896
| 0.739475
| 0.717799
| 0.717799
| 0.686119
| 0
| 0.653262
| 0.275874
| 8,551
| 144
| 102
| 59.381944
| 0.121609
| 0.008186
| 0
| 0.707692
| 1
| 0
| 0.001681
| 0
| 0
| 0
| 0
| 0
| 0.030769
| 1
| 0.023077
| false
| 0
| 0.023077
| 0
| 0.053846
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
d285ba63a74f5c0dfab696e140ba1425bca654a8
| 37,395
|
py
|
Python
|
mayan/apps/document_states/tests/test_workflow_views.py
|
CMU-313/fall-2021-hw2-451-unavailable-for-legal-reasons
|
0e4e919fd2e1ded6711354a0330135283e87f8c7
|
[
"Apache-2.0"
] | 2
|
2021-09-12T19:41:19.000Z
|
2021-09-12T19:41:20.000Z
|
mayan/apps/document_states/tests/test_workflow_views.py
|
CMU-313/fall-2021-hw2-451-unavailable-for-legal-reasons
|
0e4e919fd2e1ded6711354a0330135283e87f8c7
|
[
"Apache-2.0"
] | 37
|
2021-09-13T01:00:12.000Z
|
2021-10-02T03:54:30.000Z
|
mayan/apps/document_states/tests/test_workflow_views.py
|
CMU-313/fall-2021-hw2-451-unavailable-for-legal-reasons
|
0e4e919fd2e1ded6711354a0330135283e87f8c7
|
[
"Apache-2.0"
] | 1
|
2021-09-22T13:17:30.000Z
|
2021-09-22T13:17:30.000Z
|
from mayan.apps.documents.tests.base import GenericDocumentViewTestCase
from mayan.apps.documents.permissions import permission_document_type_edit
from mayan.apps.testing.tests.base import GenericViewTestCase
from ..events import (
event_workflow_template_created, event_workflow_template_edited
)
from ..models import Workflow
from ..permissions import (
permission_workflow_template_create, permission_workflow_template_delete,
permission_workflow_template_edit, permission_workflow_template_view,
permission_workflow_tools
)
from .literals import TEST_WORKFLOW_TEMPLATE_LABEL
from .mixins.workflow_instance_mixins import DocumentWorkflowTemplateViewTestMixin
from .mixins.workflow_template_mixins import (
DocumentTypeAddRemoveWorkflowTemplateViewTestMixin,
WorkflowTemplateDocumentTypeViewTestMixin, WorkflowTemplateTestMixin,
WorkflowTemplateViewTestMixin, WorkflowToolViewTestMixin
)
class DocumentTypeAddRemoveWorkflowTemplateViewTestCase(
DocumentTypeAddRemoveWorkflowTemplateViewTestMixin,
WorkflowTemplateTestMixin, GenericDocumentViewTestCase
):
auto_upload_test_document = False
def setUp(self):
super().setUp()
self._create_test_workflow_template()
def test_document_type_workflow_template_add_remove_get_view_no_permission(self):
self.test_document_type.workflows.add(
self.test_workflow_template
)
self._clear_events()
response = self._request_test_document_type_workflow_template_add_remove_get_view()
self.assertNotContains(
response=response, text=str(self.test_document_type),
status_code=404
)
self.assertNotContains(
response=response, text=str(self.test_workflow_template),
status_code=404
)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_document_type_workflow_template_add_remove_get_view_with_document_type_access(self):
self.test_document_type.workflows.add(
self.test_workflow_template
)
self.grant_access(
obj=self.test_document_type,
permission=permission_document_type_edit
)
self._clear_events()
response = self._request_test_document_type_workflow_template_add_remove_get_view()
self.assertContains(
response=response, text=str(self.test_document_type),
status_code=200
)
self.assertNotContains(
response=response, text=str(self.test_workflow_template),
status_code=200
)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_document_type_workflow_template_add_remove_get_view_with_workflow_template_access(self):
self.test_document_type.workflows.add(
self.test_workflow_template
)
self.grant_access(
obj=self.test_workflow_template,
permission=permission_workflow_template_edit
)
self._clear_events()
response = self._request_test_document_type_workflow_template_add_remove_get_view()
self.assertNotContains(
response=response, text=str(self.test_document_type),
status_code=404
)
self.assertNotContains(
response=response, text=str(self.test_workflow_template),
status_code=404
)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_document_type_workflow_template_add_remove_get_view_with_full_access(self):
self.test_document_type.workflows.add(
self.test_workflow_template
)
self.grant_access(
obj=self.test_document_type,
permission=permission_document_type_edit
)
self.grant_access(
obj=self.test_workflow_template,
permission=permission_workflow_template_edit
)
self._clear_events()
response = self._request_test_document_type_workflow_template_add_remove_get_view()
self.assertContains(
response=response, text=str(self.test_document_type),
status_code=200
)
self.assertContains(
response=response, text=str(self.test_workflow_template),
status_code=200
)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_document_type_workflow_template_add_view_no_permission(self):
self._clear_events()
response = self._request_test_document_type_workflow_template_add_view()
self.assertEqual(response.status_code, 404)
self.assertTrue(
self.test_workflow_template not in self.test_document_type.workflows.all()
)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_document_type_workflow_template_add_view_with_document_type_access(self):
self.grant_access(
obj=self.test_document_type,
permission=permission_document_type_edit
)
self._clear_events()
response = self._request_test_document_type_workflow_template_add_view()
self.assertEqual(response.status_code, 200)
self.assertTrue(
self.test_workflow_template not in self.test_document_type.workflows.all()
)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_document_type_workflow_template_add_view_with_workflow_template_access(self):
self.grant_access(
obj=self.test_workflow_template,
permission=permission_workflow_template_edit
)
self._clear_events()
response = self._request_test_document_type_workflow_template_add_view()
self.assertEqual(response.status_code, 404)
self.assertTrue(
self.test_workflow_template not in self.test_document_type.workflows.all()
)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_document_type_workflow_template_add_view_with_full_access(self):
self.grant_access(
obj=self.test_document_type,
permission=permission_document_type_edit
)
self.grant_access(
obj=self.test_workflow_template,
permission=permission_workflow_template_edit
)
self._clear_events()
response = self._request_test_document_type_workflow_template_add_view()
self.assertEqual(response.status_code, 302)
self.assertTrue(
self.test_workflow_template in self.test_document_type.workflows.all()
)
events = self._get_test_events()
self.assertEqual(events.count(), 1)
self.assertEqual(events[0].action_object, self.test_document_type)
self.assertEqual(events[0].actor, self._test_case_user)
self.assertEqual(events[0].target, self.test_workflow_template)
self.assertEqual(events[0].verb, event_workflow_template_edited.id)
def test_document_type_workflow_template_remove_view_no_permission(self):
self.test_document_type.workflows.add(
self.test_workflow_template
)
self._clear_events()
response = self._request_test_document_type_workflow_template_remove_view()
self.assertEqual(response.status_code, 404)
self.assertTrue(
self.test_workflow_template in self.test_document_type.workflows.all()
)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_document_type_workflow_template_remove_view_with_document_type_access(self):
self.test_document_type.workflows.add(
self.test_workflow_template
)
self.grant_access(
obj=self.test_document_type,
permission=permission_document_type_edit
)
self._clear_events()
response = self._request_test_document_type_workflow_template_remove_view()
self.assertEqual(response.status_code, 200)
self.assertTrue(
self.test_workflow_template in self.test_document_type.workflows.all()
)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_document_type_workflow_template_remove_view_with_workflow_template_access(self):
    """With access only to the workflow template, the remove view 404s
    and nothing is unlinked.
    """
    self.test_document_type.workflows.add(
        self.test_workflow_template
    )
    self.grant_access(
        obj=self.test_workflow_template,
        permission=permission_workflow_template_edit
    )

    self._clear_events()

    response = self._request_test_document_type_workflow_template_remove_view()
    self.assertEqual(response.status_code, 404)

    # assertIn produces a clearer failure message than
    # assertTrue(x in y).
    self.assertIn(
        self.test_workflow_template,
        self.test_document_type.workflows.all()
    )

    events = self._get_test_events()
    self.assertEqual(events.count(), 0)
def test_document_type_workflow_template_remove_view_with_full_access(self):
    """With edit access to both objects, the remove view succeeds (302),
    unlinks the workflow template, and emits exactly one workflow
    template edited event.
    """
    self.test_document_type.workflows.add(
        self.test_workflow_template
    )
    self.grant_access(
        obj=self.test_document_type,
        permission=permission_document_type_edit
    )
    self.grant_access(
        obj=self.test_workflow_template,
        permission=permission_workflow_template_edit
    )

    self._clear_events()

    response = self._request_test_document_type_workflow_template_remove_view()
    self.assertEqual(response.status_code, 302)

    # assertNotIn produces a clearer failure message than
    # assertTrue(x not in y).
    self.assertNotIn(
        self.test_workflow_template,
        self.test_document_type.workflows.all()
    )

    events = self._get_test_events()
    self.assertEqual(events.count(), 1)

    self.assertEqual(events[0].action_object, self.test_document_type)
    self.assertEqual(events[0].actor, self._test_case_user)
    self.assertEqual(events[0].target, self.test_workflow_template)
    self.assertEqual(events[0].verb, event_workflow_template_edited.id)
class DocumentWorkflowTemplateViewTestCase(
    DocumentWorkflowTemplateViewTestMixin, WorkflowTemplateTestMixin,
    GenericDocumentViewTestCase
):
    """ACL tests for launching a single workflow on a single document.

    A workflow template with two states and one transition is created in
    ``setUp`` with ``auto_launch`` disabled, so launches only happen via
    the view under test.
    """
    auto_upload_test_document = False

    def setUp(self):
        super().setUp()
        self._create_test_workflow_template()
        self._create_test_workflow_template_state()
        self._create_test_workflow_template_state()
        self._create_test_workflow_template_transition()
        self.test_workflow_template.document_types.add(
            self.test_document_type
        )
        # Disable auto launch so the tests control instance creation.
        self.test_workflow_template.auto_launch = False
        self.test_workflow_template.save()

    def test_document_single_workflow_launch_view_no_permission(self):
        """Without any access the launch view 404s; no instance created."""
        self._create_test_document_stub()

        workflow_instance_count = self.test_document.workflows.count()

        self._clear_events()

        response = self._request_test_document_single_workflow_template_launch_view()
        self.assertEqual(response.status_code, 404)

        self.assertEqual(
            self.test_document.workflows.count(), workflow_instance_count
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_document_single_workflow_launch_view_with_document_access(self):
        """With access only to the document the view renders (200) but
        must not create a workflow instance.
        """
        self._create_test_document_stub()
        self.grant_access(
            obj=self.test_document, permission=permission_workflow_tools
        )

        workflow_instance_count = self.test_document.workflows.count()

        self._clear_events()

        response = self._request_test_document_single_workflow_template_launch_view()
        self.assertEqual(response.status_code, 200)

        self.assertEqual(
            self.test_document.workflows.count(), workflow_instance_count
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_document_single_workflow_launch_view_with_workflow_access(self):
        """With access only to the workflow template the view 404s."""
        self._create_test_document_stub()
        self.grant_access(
            obj=self.test_workflow_template,
            permission=permission_workflow_tools
        )

        workflow_instance_count = self.test_document.workflows.count()

        self._clear_events()

        response = self._request_test_document_single_workflow_template_launch_view()
        self.assertEqual(response.status_code, 404)

        self.assertEqual(
            self.test_document.workflows.count(), workflow_instance_count
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_document_single_workflow_launch_view_with_full_access(self):
        """With access to both objects the launch succeeds (302) and one
        workflow instance is created.
        """
        self._create_test_document_stub()
        self.grant_access(
            obj=self.test_document, permission=permission_workflow_tools
        )
        self.grant_access(
            obj=self.test_workflow_template,
            permission=permission_workflow_tools
        )

        workflow_instance_count = self.test_document.workflows.count()

        self._clear_events()

        response = self._request_test_document_single_workflow_template_launch_view()
        self.assertEqual(response.status_code, 302)

        self.assertEqual(
            self.test_document.workflows.count(), workflow_instance_count + 1
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_trashed_document_single_workflow_launch_view_with_full_access(self):
        """A trashed document is excluded even with full access (404)."""
        self._create_test_document_stub()
        self.grant_access(
            obj=self.test_document, permission=permission_workflow_tools
        )
        self.grant_access(
            obj=self.test_workflow_template,
            permission=permission_workflow_tools
        )

        workflow_instance_count = self.test_document.workflows.count()

        # Move the document to the trash; the view must no longer
        # resolve it.
        self.test_document.delete()

        self._clear_events()

        response = self._request_test_document_single_workflow_template_launch_view()
        self.assertEqual(response.status_code, 404)

        self.assertEqual(
            self.test_document.workflows.count(), workflow_instance_count
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)
class WorkflowTemplateDocumentTypeViewTestCase(
    WorkflowTemplateDocumentTypeViewTestMixin, WorkflowTemplateTestMixin,
    GenericDocumentViewTestCase
):
    """ACL tests for the workflow template document type add/remove views."""
    auto_upload_test_document = False

    def setUp(self):
        super().setUp()
        self._create_test_workflow_template()

    def test_workflow_template_document_type_add_remove_get_view_no_permission(self):
        """Without any access the GET view 404s and shows neither object."""
        self.test_workflow_template.document_types.add(
            self.test_document_type
        )

        test_workflow_template_document_type_count = self.test_workflow_template.document_types.count()

        self._clear_events()

        response = self._request_test_workflow_template_document_type_add_remove_get_view()
        self.assertNotContains(
            response=response, text=str(self.test_document_type),
            status_code=404
        )
        self.assertNotContains(
            response=response, text=str(self.test_workflow_template),
            status_code=404
        )

        self.assertEqual(
            self.test_workflow_template.document_types.count(),
            test_workflow_template_document_type_count
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_workflow_template_document_type_add_remove_get_view_with_document_type_access(self):
        """Document type access alone is not enough; the view still 404s."""
        self.test_workflow_template.document_types.add(
            self.test_document_type
        )

        test_workflow_template_document_type_count = self.test_workflow_template.document_types.count()

        self.grant_access(
            obj=self.test_document_type,
            permission=permission_document_type_edit
        )

        self._clear_events()

        response = self._request_test_workflow_template_document_type_add_remove_get_view()
        self.assertNotContains(
            response=response, text=str(self.test_document_type),
            status_code=404
        )
        self.assertNotContains(
            response=response, text=str(self.test_workflow_template),
            status_code=404
        )

        self.assertEqual(
            self.test_workflow_template.document_types.count(),
            test_workflow_template_document_type_count
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_workflow_template_document_type_add_remove_get_view_with_workflow_template_access(self):
        """Workflow template access renders the view (200) showing the
        template but not the unauthorized document type.
        """
        self.test_workflow_template.document_types.add(
            self.test_document_type
        )

        test_workflow_template_document_type_count = self.test_workflow_template.document_types.count()

        self.grant_access(
            obj=self.test_workflow_template,
            permission=permission_workflow_template_edit
        )

        self._clear_events()

        response = self._request_test_workflow_template_document_type_add_remove_get_view()
        self.assertNotContains(
            response=response, text=str(self.test_document_type),
            status_code=200
        )
        self.assertContains(
            response=response, text=str(self.test_workflow_template),
            status_code=200
        )

        self.assertEqual(
            self.test_workflow_template.document_types.count(),
            test_workflow_template_document_type_count
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_workflow_template_document_type_add_remove_get_view_with_full_access(self):
        """Full access renders the view (200) showing both objects."""
        self.test_workflow_template.document_types.add(
            self.test_document_type
        )

        test_workflow_template_document_type_count = self.test_workflow_template.document_types.count()

        self.grant_access(
            obj=self.test_document_type,
            permission=permission_document_type_edit
        )
        self.grant_access(
            obj=self.test_workflow_template,
            permission=permission_workflow_template_edit
        )

        self._clear_events()

        response = self._request_test_workflow_template_document_type_add_remove_get_view()
        self.assertContains(
            response=response, text=str(self.test_document_type),
            status_code=200
        )
        self.assertContains(
            response=response, text=str(self.test_workflow_template),
            status_code=200
        )

        self.assertEqual(
            self.test_workflow_template.document_types.count(),
            test_workflow_template_document_type_count
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_workflow_template_document_type_add_view_no_permission(self):
        """Without any access the add view 404s and nothing is linked.

        NOTE(review): the original version of this test granted
        ``permission_document_type_edit``, which duplicated the
        ``..._with_document_type_access`` test below; the grant was
        removed so this is a true no-permission test (the expected 404
        is unchanged).
        """
        test_workflow_template_document_type_count = self.test_workflow_template.document_types.count()

        self._clear_events()

        response = self._request_test_workflow_template_document_type_add_view()
        self.assertEqual(response.status_code, 404)

        self.assertEqual(
            self.test_workflow_template.document_types.count(),
            test_workflow_template_document_type_count
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_workflow_template_document_type_add_view_with_document_type_access(self):
        """Document type access alone is not enough; the add view 404s."""
        test_workflow_template_document_type_count = self.test_workflow_template.document_types.count()

        self.grant_access(
            obj=self.test_document_type,
            permission=permission_document_type_edit
        )

        self._clear_events()

        response = self._request_test_workflow_template_document_type_add_view()
        self.assertEqual(response.status_code, 404)

        self.assertEqual(
            self.test_workflow_template.document_types.count(),
            test_workflow_template_document_type_count
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_workflow_template_document_type_add_view_with_workflow_template_access(self):
        """Workflow template access renders the view (200) but, without
        document type access, must not link anything.
        """
        test_workflow_template_document_type_count = self.test_workflow_template.document_types.count()

        self.grant_access(
            obj=self.test_workflow_template,
            permission=permission_workflow_template_edit
        )

        self._clear_events()

        response = self._request_test_workflow_template_document_type_add_view()
        self.assertEqual(response.status_code, 200)

        self.assertEqual(
            self.test_workflow_template.document_types.count(),
            test_workflow_template_document_type_count
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_workflow_template_document_type_add_view_with_full_access(self):
        """Full access succeeds (302), links the document type, and
        emits one workflow template edited event.
        """
        test_workflow_template_document_type_count = self.test_workflow_template.document_types.count()

        self.grant_access(
            obj=self.test_document_type,
            permission=permission_document_type_edit
        )
        self.grant_access(
            obj=self.test_workflow_template,
            permission=permission_workflow_template_edit
        )

        self._clear_events()

        response = self._request_test_workflow_template_document_type_add_view()
        self.assertEqual(response.status_code, 302)

        self.assertEqual(
            self.test_workflow_template.document_types.count(),
            test_workflow_template_document_type_count + 1
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 1)

        self.assertEqual(events[0].action_object, self.test_document_type)
        self.assertEqual(events[0].actor, self._test_case_user)
        self.assertEqual(events[0].target, self.test_workflow_template)
        self.assertEqual(events[0].verb, event_workflow_template_edited.id)

    def test_workflow_template_document_type_remove_view_no_permission(self):
        """Without any access the remove view 404s and nothing is unlinked.

        NOTE(review): as in the add no-permission test, an accidental
        ``permission_document_type_edit`` grant was removed here; the
        expected 404 is unchanged.
        """
        self.test_workflow_template.document_types.add(self.test_document_type)

        test_workflow_template_document_type_count = self.test_workflow_template.document_types.count()

        self._clear_events()

        response = self._request_test_workflow_template_document_type_remove_view()
        self.assertEqual(response.status_code, 404)

        self.assertEqual(
            self.test_workflow_template.document_types.count(),
            test_workflow_template_document_type_count
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_workflow_template_document_type_remove_view_with_document_type_access(self):
        """Document type access alone is not enough; the remove view 404s."""
        self.test_workflow_template.document_types.add(
            self.test_document_type
        )

        test_workflow_template_document_type_count = self.test_workflow_template.document_types.count()

        self.grant_access(
            obj=self.test_document_type,
            permission=permission_document_type_edit
        )

        self._clear_events()

        response = self._request_test_workflow_template_document_type_remove_view()
        self.assertEqual(response.status_code, 404)

        self.assertEqual(
            self.test_workflow_template.document_types.count(),
            test_workflow_template_document_type_count
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_workflow_template_document_type_remove_view_with_workflow_template_access(self):
        """Workflow template access renders the view (200) but, without
        document type access, must not unlink anything.
        """
        self.test_workflow_template.document_types.add(
            self.test_document_type
        )

        test_workflow_template_document_type_count = self.test_workflow_template.document_types.count()

        self.grant_access(
            obj=self.test_workflow_template,
            permission=permission_workflow_template_edit
        )

        self._clear_events()

        response = self._request_test_workflow_template_document_type_remove_view()
        self.assertEqual(response.status_code, 200)

        self.assertEqual(
            self.test_workflow_template.document_types.count(),
            test_workflow_template_document_type_count
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_workflow_template_document_type_remove_view_with_full_access(self):
        """Full access succeeds (302), unlinks the document type, and
        emits one workflow template edited event.
        """
        self.test_workflow_template.document_types.add(
            self.test_document_type
        )

        test_workflow_template_document_type_count = self.test_workflow_template.document_types.count()

        self.grant_access(
            obj=self.test_document_type,
            permission=permission_document_type_edit
        )
        self.grant_access(
            obj=self.test_workflow_template,
            permission=permission_workflow_template_edit
        )

        self._clear_events()

        response = self._request_test_workflow_template_document_type_remove_view()
        self.assertEqual(response.status_code, 302)

        self.assertEqual(
            self.test_workflow_template.document_types.count(),
            test_workflow_template_document_type_count - 1
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 1)

        self.assertEqual(events[0].action_object, self.test_document_type)
        self.assertEqual(events[0].actor, self._test_case_user)
        self.assertEqual(events[0].target, self.test_workflow_template)
        self.assertEqual(events[0].verb, event_workflow_template_edited.id)
class WorkflowTemplateViewTestCase(
    WorkflowTemplateTestMixin, WorkflowTemplateViewTestMixin,
    GenericViewTestCase
):
    """ACL tests for workflow template create/delete/edit/list/preview views."""

    def test_workflow_template_create_view_no_permission(self):
        """Without the create permission the view is forbidden (403)."""
        self._clear_events()

        response = self._request_test_workflow_template_create_view()
        self.assertEqual(response.status_code, 403)

        self.assertEqual(Workflow.objects.count(), 0)

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_workflow_template_create_view_with_permission(self):
        """With the create permission a workflow is created (302) and a
        workflow template created event is emitted.
        """
        self.grant_permission(permission=permission_workflow_template_create)

        self._clear_events()

        response = self._request_test_workflow_template_create_view()
        self.assertEqual(response.status_code, 302)

        self.assertEqual(Workflow.objects.count(), 1)
        self.assertEqual(
            Workflow.objects.all()[0].label, TEST_WORKFLOW_TEMPLATE_LABEL
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 1)

        self.assertEqual(events[0].action_object, None)
        self.assertEqual(events[0].actor, self._test_case_user)
        self.assertEqual(events[0].target, self.test_workflow_template)
        self.assertEqual(events[0].verb, event_workflow_template_created.id)

    def test_workflow_template_delete_view_no_permission(self):
        """Without access the delete view 404s and the workflow survives."""
        self._create_test_workflow_template()

        self._clear_events()

        response = self._request_test_workflow_template_delete_view()
        self.assertEqual(response.status_code, 404)

        # assertIn/assertNotIn give clearer failure messages than
        # assertTrue/assertFalse(x in y).
        self.assertIn(self.test_workflow_template, Workflow.objects.all())

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_workflow_template_delete_view_with_access(self):
        """With delete access the workflow is removed (302); deletions
        emit no events.
        """
        self._create_test_workflow_template()
        self.grant_access(
            obj=self.test_workflow_template,
            permission=permission_workflow_template_delete
        )

        self._clear_events()

        response = self._request_test_workflow_template_delete_view()
        self.assertEqual(response.status_code, 302)

        self.assertNotIn(self.test_workflow_template, Workflow.objects.all())

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_workflow_template_edit_view_no_permission(self):
        """Without access the edit view 404s and the label is unchanged."""
        self._create_test_workflow_template()

        test_workflow_template_label = self.test_workflow_template.label

        self._clear_events()

        response = self._request_test_workflow_template_edit_view()
        self.assertEqual(response.status_code, 404)

        self.test_workflow_template.refresh_from_db()
        self.assertEqual(
            self.test_workflow_template.label, test_workflow_template_label
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_workflow_template_edit_view_with_access(self):
        """With edit access the label changes (302) and one workflow
        template edited event is emitted.
        """
        self._create_test_workflow_template()

        test_workflow_template_label = self.test_workflow_template.label

        self.grant_access(
            obj=self.test_workflow_template,
            permission=permission_workflow_template_edit
        )

        self._clear_events()

        response = self._request_test_workflow_template_edit_view()
        self.assertEqual(response.status_code, 302)

        self.test_workflow_template.refresh_from_db()
        self.assertNotEqual(
            self.test_workflow_template.label, test_workflow_template_label
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 1)

        self.assertEqual(events[0].action_object, None)
        self.assertEqual(events[0].actor, self._test_case_user)
        self.assertEqual(events[0].target, self.test_workflow_template)
        self.assertEqual(events[0].verb, event_workflow_template_edited.id)

    def test_workflow_template_list_view_no_permission(self):
        """Without access the list renders (200) but hides the workflow."""
        self._create_test_workflow_template()

        self._clear_events()

        response = self._request_test_workflow_template_list_view()
        self.assertNotContains(
            response=response, text=self.test_workflow_template.label,
            status_code=200
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_workflow_template_list_view_with_access(self):
        """With view access the list shows the workflow label."""
        self._create_test_workflow_template()
        self.grant_access(
            obj=self.test_workflow_template,
            permission=permission_workflow_template_view
        )

        self._clear_events()

        response = self._request_test_workflow_template_list_view()
        self.assertContains(
            response=response, text=self.test_workflow_template.label,
            status_code=200
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_workflow_template_preview_view_no_permission(self):
        """Without access the preview view 404s."""
        self._create_test_workflow_template()

        self._clear_events()

        response = self._request_test_workflow_template_preview_view()
        self.assertEqual(response.status_code, 404)

        self.assertIn(self.test_workflow_template, Workflow.objects.all())

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_workflow_template_preview_view_with_access(self):
        """With view access the preview renders (200)."""
        self._create_test_workflow_template()
        self.grant_access(
            obj=self.test_workflow_template,
            permission=permission_workflow_template_view
        )

        self._clear_events()

        response = self._request_test_workflow_template_preview_view()
        self.assertEqual(response.status_code, 200)

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)
class WorkflowTemplateDocumentViewTestCase(
    WorkflowTemplateTestMixin, WorkflowTemplateViewTestMixin,
    GenericDocumentViewTestCase
):
    """ACL tests for the per-workflow-template launch view."""
    auto_upload_test_document = False

    def setUp(self):
        super().setUp()
        self._create_test_document_stub()
        self._create_test_workflow_template(add_test_document_type=True)
        self._create_test_workflow_template_state()
        self._create_test_workflow_template_state()
        self._create_test_workflow_template_transition()

    def test_workflows_launch_view_no_permission(self):
        """Without access the launch view 404s; no instance created."""
        workflow_instance_count = self.test_document.workflows.count()

        self._clear_events()

        response = self._request_test_workflow_template_launch_view()
        self.assertEqual(response.status_code, 404)

        self.assertEqual(
            self.test_document.workflows.count(), workflow_instance_count
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_workflows_launch_view_with_permission(self):
        """With the tools permission on the template, the launch
        succeeds (302) and creates one workflow instance.
        """
        self.grant_access(
            obj=self.test_workflow_template,
            permission=permission_workflow_tools
        )

        workflow_instance_count = self.test_document.workflows.count()

        self._clear_events()

        response = self._request_test_workflow_template_launch_view()
        self.assertEqual(response.status_code, 302)

        self.assertEqual(
            self.test_document.workflows.count(), workflow_instance_count + 1
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_trashed_document_workflows_launch_view_with_permission(self):
        """A trashed document gets no workflow instance even though the
        view itself succeeds (302).
        """
        self.grant_access(
            obj=self.test_workflow_template,
            permission=permission_workflow_tools
        )

        workflow_instance_count = self.test_document.workflows.count()

        # Move the document to the trash before launching.
        self.test_document.delete()

        self._clear_events()

        response = self._request_test_workflow_template_launch_view()
        self.assertEqual(response.status_code, 302)

        self.assertEqual(
            self.test_document.workflows.count(), workflow_instance_count
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)
class WorkflowToolViewTestCase(
    WorkflowTemplateTestMixin, WorkflowToolViewTestMixin,
    GenericDocumentViewTestCase
):
    """Permission tests for the global 'launch all workflows' tool view."""
    auto_upload_test_document = False

    def setUp(self):
        super().setUp()
        self._create_test_document_stub()
        self._create_test_workflow_template(add_test_document_type=True)
        self._create_test_workflow_template_state()
        self._create_test_workflow_template_state()
        self._create_test_workflow_template_transition()

    def test_tool_launch_workflows_view_no_permission(self):
        """Without the tools permission the tool view is forbidden (403)."""
        workflow_instance_count = self.test_document.workflows.count()

        self._clear_events()

        response = self._request_workflow_launch_view()
        self.assertEqual(response.status_code, 403)

        self.assertEqual(
            self.test_document.workflows.count(), workflow_instance_count
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_tool_launch_workflows_view_with_permission(self):
        """With the global tools permission the tool launches workflows
        (302) and creates one instance for the test document.
        """
        self.grant_permission(permission=permission_workflow_tools)

        workflow_instance_count = self.test_document.workflows.count()

        self._clear_events()

        response = self._request_workflow_launch_view()
        self.assertEqual(response.status_code, 302)

        self.assertEqual(
            self.test_document.workflows.count(), workflow_instance_count + 1
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_trashed_document_tool_launch_workflows_view_with_permission(self):
        """A trashed document is skipped by the tool even though the
        view itself succeeds (302).
        """
        self.grant_permission(permission=permission_workflow_tools)

        workflow_instance_count = self.test_document.workflows.count()

        # Move the document to the trash before launching.
        self.test_document.delete()

        self._clear_events()

        response = self._request_workflow_launch_view()
        self.assertEqual(response.status_code, 302)

        self.assertEqual(
            self.test_document.workflows.count(), workflow_instance_count
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)
| 34.307339
| 104
| 0.681455
| 3,983
| 37,395
| 5.91991
| 0.027115
| 0.177107
| 0.16625
| 0.102803
| 0.93825
| 0.935154
| 0.928411
| 0.92311
| 0.916621
| 0.914034
| 0
| 0.008402
| 0.248884
| 37,395
| 1,089
| 105
| 34.338843
| 0.831067
| 0
| 0
| 0.757803
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.200999
| 1
| 0.062422
| false
| 0
| 0.011236
| 0
| 0.087391
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
965e8deec87069fdc478cf178c68d8fb36fd8818
| 83
|
py
|
Python
|
movie_and_captions/youtube_api/__init__.py
|
KKawamura1/get-youtube-captions
|
1ee025583512c02d6cc1f1c0c6a69cc0270c07ac
|
[
"MIT"
] | null | null | null |
movie_and_captions/youtube_api/__init__.py
|
KKawamura1/get-youtube-captions
|
1ee025583512c02d6cc1f1c0c6a69cc0270c07ac
|
[
"MIT"
] | null | null | null |
movie_and_captions/youtube_api/__init__.py
|
KKawamura1/get-youtube-captions
|
1ee025583512c02d6cc1f1c0c6a69cc0270c07ac
|
[
"MIT"
] | null | null | null |
from .youtube_api import YoutubeAPI
from .dirty_youtube_api import DirtyYoutubeAPI
| 27.666667
| 46
| 0.879518
| 11
| 83
| 6.363636
| 0.636364
| 0.285714
| 0.457143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096386
| 83
| 2
| 47
| 41.5
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
9672b4d5718d67696e00322c1934140287f550d0
| 2,263
|
py
|
Python
|
Lib/site-packages/tensorflow_core/_api/v2/dtypes/__init__.py
|
caiyongji/py36-tf2.0rc
|
c5b4b364ba14214534228570e58ef96b1a8bb6dc
|
[
"CNRI-Python-GPL-Compatible"
] | null | null | null |
Lib/site-packages/tensorflow_core/_api/v2/dtypes/__init__.py
|
caiyongji/py36-tf2.0rc
|
c5b4b364ba14214534228570e58ef96b1a8bb6dc
|
[
"CNRI-Python-GPL-Compatible"
] | null | null | null |
Lib/site-packages/tensorflow_core/_api/v2/dtypes/__init__.py
|
caiyongji/py36-tf2.0rc
|
c5b4b364ba14214534228570e58ef96b1a8bb6dc
|
[
"CNRI-Python-GPL-Compatible"
] | null | null | null |
# This file is MACHINE GENERATED! Do not edit.
# Generated by: tensorflow/python/tools/api/generator/create_python_api.py script.
"""Public API for tf.dtypes namespace.
"""
from __future__ import print_function as _print_function
import sys as _sys
from tensorflow.python.framework.dtypes import DType
from tensorflow.python.framework.dtypes import QUANTIZED_DTYPES
from tensorflow.python.framework.dtypes import as_dtype
from tensorflow.python.framework.dtypes import bfloat16
from tensorflow.python.framework.dtypes import bool
from tensorflow.python.framework.dtypes import complex128
from tensorflow.python.framework.dtypes import complex64
from tensorflow.python.framework.dtypes import double
from tensorflow.python.framework.dtypes import float16
from tensorflow.python.framework.dtypes import float32
from tensorflow.python.framework.dtypes import float64
from tensorflow.python.framework.dtypes import half
from tensorflow.python.framework.dtypes import int16
from tensorflow.python.framework.dtypes import int32
from tensorflow.python.framework.dtypes import int64
from tensorflow.python.framework.dtypes import int8
from tensorflow.python.framework.dtypes import qint16
from tensorflow.python.framework.dtypes import qint32
from tensorflow.python.framework.dtypes import qint8
from tensorflow.python.framework.dtypes import quint16
from tensorflow.python.framework.dtypes import quint8
from tensorflow.python.framework.dtypes import resource
from tensorflow.python.framework.dtypes import string
from tensorflow.python.framework.dtypes import uint16
from tensorflow.python.framework.dtypes import uint32
from tensorflow.python.framework.dtypes import uint64
from tensorflow.python.framework.dtypes import uint8
from tensorflow.python.framework.dtypes import variant
from tensorflow.python.ops.math_ops import cast
from tensorflow.python.ops.math_ops import complex
from tensorflow.python.ops.math_ops import saturate_cast
del _print_function
from tensorflow.python.util import module_wrapper as _module_wrapper
if not isinstance(_sys.modules[__name__], _module_wrapper.TFModuleWrapper):
_sys.modules[__name__] = _module_wrapper.TFModuleWrapper(
_sys.modules[__name__], "dtypes", public_apis=None, deprecation=False,
has_lite=False)
| 45.26
| 82
| 0.851525
| 301
| 2,263
| 6.262458
| 0.255814
| 0.280106
| 0.339523
| 0.430769
| 0.723607
| 0.723607
| 0.15809
| 0.051989
| 0.051989
| 0
| 0
| 0.016916
| 0.085727
| 2,263
| 49
| 83
| 46.183673
| 0.894152
| 0.071586
| 0
| 0
| 1
| 0
| 0.002867
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.871795
| 0
| 0.871795
| 0.051282
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
96a5212a3002f5834057d1964869757a25bf3226
| 3,749
|
py
|
Python
|
src/compas/geometry/shapes/_shape.py
|
ricardoavelino/compas
|
e3c7f004b8839f96bf01f9f6b21a75786c3f59fa
|
[
"MIT"
] | null | null | null |
src/compas/geometry/shapes/_shape.py
|
ricardoavelino/compas
|
e3c7f004b8839f96bf01f9f6b21a75786c3f59fa
|
[
"MIT"
] | null | null | null |
src/compas/geometry/shapes/_shape.py
|
ricardoavelino/compas
|
e3c7f004b8839f96bf01f9f6b21a75786c3f59fa
|
[
"MIT"
] | null | null | null |
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
import abc
from ..geometry import Geometry
class Shape(Geometry):
    """Base class for geometric shapes.

    Subclasses must implement :meth:`to_vertices_and_faces`; the boolean
    operators (``+``, ``-``, ``&``, ``|``) then work on any pair of
    shapes by converting both to triangulated meshes.
    """

    @abc.abstractmethod
    def to_vertices_and_faces(self, triangulated=False):
        """Convert the shape to a list of vertices and faces.

        Must be provided by concrete subclasses.
        """
        pass

    def __add__(self, other):
        """Compute the boolean union using the "+" operator of this shape and another.

        Parameters
        ----------
        other : :class:`~compas.geometry.Shape`
            The solid to add.

        Returns
        -------
        :class:`~compas.geometry.Polyhedron`
            The resulting solid.

        Examples
        --------
        >>> from compas.geometry import Box, Sphere
        >>> A = Box.from_width_height_depth(2, 2, 2)
        >>> B = Sphere([1, 1, 1], 1.0)
        >>> C = A + B                                   # doctest: +SKIP

        """
        # Imported lazily: the boolean backends are optional plugins.
        from compas.geometry import boolean_union_mesh_mesh
        from compas.geometry import Polyhedron
        A = self.to_vertices_and_faces(triangulated=True)
        B = other.to_vertices_and_faces(triangulated=True)
        V, F = boolean_union_mesh_mesh(A, B)
        return Polyhedron(V, F)

    def __sub__(self, other):
        """Compute the boolean difference using the "-" operator of this shape and another.

        Parameters
        ----------
        other : :class:`~compas.geometry.Shape`
            The solid to subtract.

        Returns
        -------
        :class:`~compas.geometry.Polyhedron`
            The resulting solid.

        Examples
        --------
        >>> from compas.geometry import Box, Sphere
        >>> A = Box.from_width_height_depth(2, 2, 2)
        >>> B = Sphere([1, 1, 1], 1.0)
        >>> C = A - B                                   # doctest: +SKIP

        """
        from compas.geometry import boolean_difference_mesh_mesh
        from compas.geometry import Polyhedron
        A = self.to_vertices_and_faces(triangulated=True)
        B = other.to_vertices_and_faces(triangulated=True)
        V, F = boolean_difference_mesh_mesh(A, B)
        return Polyhedron(V, F)

    def __and__(self, other):
        """Compute the boolean intersection using the "&" operator of this shape and another.

        Parameters
        ----------
        other : :class:`~compas.geometry.Shape`
            The solid to intersect with.

        Returns
        -------
        :class:`~compas.geometry.Polyhedron`
            The resulting solid.

        Examples
        --------
        >>> from compas.geometry import Box, Sphere
        >>> A = Box.from_width_height_depth(2, 2, 2)
        >>> B = Sphere([1, 1, 1], 1.0)
        >>> C = A & B                                   # doctest: +SKIP

        """
        from compas.geometry import boolean_intersection_mesh_mesh
        from compas.geometry import Polyhedron
        A = self.to_vertices_and_faces(triangulated=True)
        B = other.to_vertices_and_faces(triangulated=True)
        V, F = boolean_intersection_mesh_mesh(A, B)
        return Polyhedron(V, F)

    def __or__(self, other):
        """Compute the boolean union using the "|" operator of this shape and another.

        Parameters
        ----------
        other : :class:`~compas.geometry.Shape`
            The solid to add.

        Returns
        -------
        :class:`~compas.geometry.Polyhedron`
            The resulting solid.

        Examples
        --------
        >>> from compas.geometry import Box, Sphere
        >>> A = Box.from_width_height_depth(2, 2, 2)
        >>> B = Sphere([1, 1, 1], 1.0)
        >>> C = A | B                                   # doctest: +SKIP

        """
        # "|" is defined as an alias of "+".
        return self.__add__(other)
| 30.729508
| 93
| 0.551614
| 415
| 3,749
| 4.778313
| 0.163855
| 0.12708
| 0.090772
| 0.121029
| 0.820978
| 0.794755
| 0.794755
| 0.794755
| 0.794755
| 0.747857
| 0
| 0.012749
| 0.330488
| 3,749
| 121
| 94
| 30.983471
| 0.777291
| 0.489731
| 0
| 0.375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.15625
| false
| 0.03125
| 0.34375
| 0
| 0.65625
| 0.03125
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
73fbd78323916eff5a47287d192542c9f4f88aaa
| 58,652
|
py
|
Python
|
venv/lib/python3.8/site-packages/cairocffi/_generated/ffi.py
|
sakthipriya-07/BuildingConstructionMaterialsSupply
|
e4b32d97eb6e574e78b955a03a0717bc7b5d13d4
|
[
"MIT"
] | 2
|
2019-12-06T15:40:14.000Z
|
2020-07-29T21:30:35.000Z
|
venv/lib/python3.8/site-packages/cairocffi/_generated/ffi.py
|
sakthipriya-07/BuildingConstructionMaterialsSupply
|
e4b32d97eb6e574e78b955a03a0717bc7b5d13d4
|
[
"MIT"
] | 13
|
2020-03-24T17:53:51.000Z
|
2022-02-10T20:01:14.000Z
|
myvenv/lib/python3.6/site-packages/cairocffi/_generated/ffi.py
|
yog240597/saleor
|
b75a23827a4ec2ce91637f0afe6808c9d09da00a
|
[
"CC-BY-4.0"
] | null | null | null |
# auto-generated file
import _cffi_backend
ffi = _cffi_backend.FFI('cairocffi._generated.ffi',
_version = 0x2601,
_types = b'\x00\x02\xA1\x0D\x00\x04\x80\x03\x00\x00\x00\x0F\x00\x02\xA1\x0D\x00\x04\x95\x03\x00\x00\x00\x0F\x00\x00\x7D\x0D\x00\x04\x92\x03\x00\x00\x00\x0F\x00\x00\x0A\x0D\x00\x04\x7C\x03\x00\x00\x00\x0F\x00\x00\x0A\x0D\x00\x00\x07\x11\x00\x00\x00\x0F\x00\x04\x7D\x0D\x00\x00\x0A\x11\x00\x00\x00\x0F\x00\x02\xE1\x0D\x00\x04\x88\x03\x00\x00\x00\x0F\x00\x03\xD7\x0D\x00\x00\x04\x11\x00\x00\x00\x0F\x00\x02\xE5\x0D\x00\x00\x13\x11\x00\x00\x00\x0F\x00\x00\x1C\x0D\x00\x04\x7F\x03\x00\x00\x00\x0F\x00\x00\x1C\x0D\x00\x04\x91\x03\x00\x00\x00\x0F\x00\x00\x1C\x0D\x00\x00\x04\x11\x00\x00\x00\x0F\x00\x00\x1C\x0D\x00\x04\x99\x03\x00\x00\x07\x0B\x00\x00\x09\x0B\x00\x00\x00\x0F\x00\x00\x1C\x0D\x00\x04\x9D\x03\x00\x00\x00\x0F\x00\x00\x1C\x0D\x00\x00\x2A\x11\x00\x00\x2A\x11\x00\x00\x00\x0F\x00\x00\x1C\x0D\x00\x00\x00\x0F\x00\x00\xC0\x0D\x00\x00\x01\x11\x00\x00\x00\x0F\x00\x00\xC0\x0D\x00\x00\x00\x0F\x00\x00\x26\x0D\x00\x00\x1C\x11\x00\x00\x00\x0F\x00\x04\x81\x0D\x00\x00\x1C\x11\x00\x00\x00\x0F\x00\x04\x81\x0D\x00\x00\x1F\x11\x00\x00\x00\x0F\x00\x00\x27\x0D\x00\x00\x1C\x11\x00\x00\x00\x0F\x00\x01\x96\x0D\x00\x00\x07\x11\x00\x00\x00\x0F\x00\x02\xB8\x0D\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x02\xA9\x0D\x00\x00\x01\x11\x00\x00\x00\x0F\x00\x02\xAD\x0D\x00\x00\x01\x11\x00\x00\x00\x0F\x00\x03\xF6\x0D\x00\x00\x04\x11\x00\x00\x00\x0F\x00\x03\xFA\x0D\x00\x00\x04\x11\x00\x00\x00\x0F\x00\x04\x06\x0D\x00\x00\x04\x11\x00\x00\x00\x0F\x00\x02\xDA\x0D\x00\x00\x13\x11\x00\x00\x08\x01\x00\x00\x00\x0F\x00\x02\xDA\x0D\x00\x00\x04\x11\x00\x00\x00\x0F\x00\x00\x13\x0D\x00\x00\x13\x11\x00\x00\x00\x0F\x00\x00\x13\x0D\x00\x00\x07\x11\x00\x00\x00\x0F\x00\x00\x13\x0D\x00\x00\x04\x11\x00\x00\x00\x0F\x00\x00\x13\x0D\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x00\x0F\x00\x00\x13\x0D\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x00\x0F\x00\x00\x13\x0D\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x00\x0F\x00\x00\x13\x0D\x00\
x00\x2A\x11\x00\x00\x02\x0B\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x00\x13\x0D\x00\x00\x00\x0F\x00\x04\x89\x0D\x00\x00\x13\x11\x00\x00\x00\x0F\x00\x03\x49\x0D\x00\x00\x04\x11\x00\x00\x00\x0F\x00\x04\x8F\x0D\x00\x04\x90\x03\x00\x04\x8C\x03\x00\x00\x00\x0F\x00\x00\x95\x0D\x00\x00\x8B\x11\x00\x00\x00\x0F\x00\x00\x95\x0D\x00\x00\x8B\x11\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x00\x95\x0D\x00\x04\x90\x03\x00\x00\x00\x0F\x00\x00\x95\x0D\x00\x00\x8A\x11\x00\x00\x00\x0F\x00\x00\x95\x0D\x00\x00\x00\x0F\x00\x00\x1F\x0D\x00\x00\x1C\x11\x00\x04\x83\x03\x00\x00\x9E\x11\x00\x00\x01\x11\x00\x00\x00\x0F\x00\x00\x1F\x0D\x00\x00\x1F\x11\x00\x00\x00\x0F\x00\x00\x1F\x0D\x00\x00\x04\x11\x00\x00\x00\x0F\x00\x02\x0D\x0D\x00\x00\x0A\x11\x00\x00\x00\x0F\x00\x02\x0D\x0D\x00\x00\x0A\x11\x00\x01\x77\x03\x00\x00\x2A\x11\x00\x00\x00\x0F\x00\x02\x0D\x0D\x00\x00\x0A\x11\x00\x04\x98\x03\x00\x00\x2A\x11\x00\x04\x74\x03\x00\x00\x00\x0F\x00\x02\x0D\x0D\x00\x00\x1C\x11\x00\x00\x00\x0F\x00\x02\x0D\x0D\x00\x00\x1C\x11\x00\x00\xB2\x11\x00\x00\x2A\x11\x00\x00\xB4\x11\x00\x00\x00\x0F\x00\x02\x0D\x0D\x00\x04\x80\x03\x00\x00\x00\x0F\x00\x02\x0D\x0D\x00\x04\x83\x03\x00\x00\x00\x0F\x00\x02\x0D\x0D\x00\x00\x13\x11\x00\x00\x00\x0F\x00\x02\x0D\x0D\x00\x00\x13\x11\x00\x00\x07\x03\x00\x00\x00\x0F\x00\x02\x0D\x0D\x00\x00\x13\x11\x00\x00\xB2\x11\x00\x00\x2A\x11\x00\x00\xB4\x11\x00\x00\x00\x0F\x00\x02\x0D\x0D\x00\x00\x13\x11\x00\x00\x69\x03\x00\x00\xD4\x11\x00\x00\xD4\x11\x00\x00\xD4\x11\x00\x00\x00\x0F\x00\x02\x0D\x0D\x00\x00\x13\x11\x00\x00\xD4\x11\x00\x00\xD4\x11\x00\x00\xD4\x11\x00\x00\xD4\x11\x00\x00\xD4\x11\x00\x00\xD4\x11\x00\x00\x00\x0F\x00\x02\x0D\x0D\x00\x00\x13\x11\x00\x00\x47\x03\x00\x00\x00\x0F\x00\x02\x0D\x0D\x00\x00\x13\x11\x00\x00\x07\x01\x00\x00\xD4\x11\x00\x00\xD4\x11\x00\x00\xD4\x11\x00\x00\xD4\x11\x00\x00\xD4\x11\x00\x00\x00\x0F\x00\x02\x0D\x0D\x00\x00\x13\x11\x00\x00\x5A\x03\x00\x00\x00\x0F\x00\x02\x0D\x0D\x00\x00\x13\x11\x00\x00\x08\x01\x00\x00\x08\x01\x00\x00\xD4\x11\x00\x00\xD4\x11\x00\
x00\x00\x0F\x00\x02\x0D\x0D\x00\x00\x13\x11\x00\x00\x08\x01\x00\x00\x08\x01\x00\x00\xD4\x11\x00\x00\xD4\x11\x00\x00\xD4\x11\x00\x00\xD4\x11\x00\x00\x00\x0F\x00\x02\x0D\x0D\x00\x00\x13\x11\x00\x00\x2A\x11\x00\x00\x00\x0F\x00\x02\x0D\x0D\x00\x00\x13\x11\x00\x00\x2A\x11\x00\x04\x88\x03\x00\x00\x00\x0F\x00\x02\x0D\x0D\x00\x00\x95\x11\x00\x00\x8B\x11\x00\x00\x00\x0F\x00\x02\x0D\x0D\x00\x00\x95\x11\x00\x00\x8A\x11\x00\x00\x00\x0F\x00\x02\x0D\x0D\x00\x00\x8A\x11\x00\x00\x00\x0F\x00\x02\x0D\x0D\x00\x00\x1F\x11\x00\x00\x00\x0F\x00\x02\x0D\x0D\x00\x00\x1F\x11\x00\x00\x04\x11\x00\x04\x7E\x03\x00\x00\x00\x0F\x00\x02\x0D\x0D\x00\x00\x1F\x11\x00\x00\xB2\x11\x00\x00\x2A\x11\x00\x00\xB4\x11\x00\x00\x00\x0F\x00\x02\x0D\x0D\x00\x00\x1F\x11\x00\x00\x25\x11\x00\x00\x07\x01\x00\x02\xB8\x03\x00\x00\xE4\x11\x00\x04\x72\x03\x00\x00\xE4\x11\x00\x04\x36\x03\x00\x00\x00\x0F\x00\x02\x0D\x0D\x00\x00\x1F\x11\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x25\x11\x00\x00\x07\x01\x00\x01\x29\x11\x00\x00\xE4\x11\x00\x01\x2B\x11\x00\x00\xE4\x11\x00\x01\x2D\x11\x00\x00\x00\x0F\x00\x02\x0D\x0D\x00\x00\x1F\x11\x00\x00\x0A\x01\x00\x00\x04\x11\x00\x04\x97\x03\x00\x00\x00\x0F\x00\x02\x0D\x0D\x00\x00\x1F\x11\x00\x00\x0A\x01\x00\x01\x3D\x03\x00\x00\x00\x0F\x00\x02\x0D\x0D\x00\x00\x1F\x11\x00\x00\x2A\x11\x00\x00\x00\x0F\x00\x02\x0D\x0D\x00\x00\x07\x11\x00\x00\x00\x0F\x00\x02\x0D\x0D\x00\x00\x07\x11\x00\x00\xAD\x11\x00\x00\x2A\x11\x00\x00\x00\x0F\x00\x02\x0D\x0D\x00\x00\x07\x11\x00\x00\xB2\x11\x00\x00\x2A\x11\x00\x00\xB4\x11\x00\x00\x00\x0F\x00\x02\x0D\x0D\x00\x00\x07\x11\x00\x00\x25\x11\x00\x00\x00\x0F\x00\x02\x0D\x0D\x00\x00\x07\x11\x00\x00\x25\x11\x00\x04\x9C\x03\x00\x00\x0A\x01\x00\x00\xB4\x11\x00\x00\x2A\x11\x00\x00\x00\x0F\x00\x02\x0D\x0D\x00\x00\x07\x11\x00\x03\x8E\x03\x00\x00\x2A\x11\x00\x00\x00\x0F\x00\x02\x0D\x0D\x00\x00\x04\x11\x00\x00\x00\x0F\x00\x02\x0D\x0D\x00\x00\x04\x11\x00\x00\xB2\x11\x00\x00\x2A\x11\x00\x00\xB4\x11\x00\x00\x00\x0F\x00\x02\x0D\x0D\x00\x00\x2A\x11\x00\x04\x9C\x03\x00\x00\x08\x01\x00\
x00\x00\x0F\x00\x02\x0D\x0D\x00\x00\x2A\x11\x00\x01\x5F\x11\x00\x00\x08\x01\x00\x00\x00\x0F\x00\x02\x8E\x0D\x00\x00\x1C\x11\x00\x00\x00\x0F\x00\x02\x92\x0D\x00\x00\x1C\x11\x00\x00\x00\x0F\x00\x02\x96\x0D\x00\x00\x1C\x11\x00\x00\x00\x0F\x00\x02\x9A\x0D\x00\x00\x1C\x11\x00\x00\x00\x0F\x00\x02\xF1\x0D\x00\x00\x13\x11\x00\x00\x00\x0F\x00\x02\xF5\x0D\x00\x00\x13\x11\x00\x00\x00\x0F\x00\x02\xB1\x0D\x00\x00\x01\x11\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x7D\x11\x00\x04\x8E\x03\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x0A\x0B\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x01\x96\x11\x00\x00\x08\x01\x00\x00\x08\x01\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x13\x11\x00\x00\x2A\x11\x00\x00\x07\x11\x00\x00\x8B\x11\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x01\x72\x03\x00\x00\x2A\x11\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\xAD\x11\x00\x00\x2A\x11\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x07\x11\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x07\x11\x00\x00\x7D\x11\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x07\x11\x00\x01\x96\x11\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x07\x11\x00\x00\x8B\x11\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x07\x11\x00\x00\x00\x0B\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x07\x11\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x04\x11\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x25\x11\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x25\x11\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x01\x74\x11\x00\x01\x96\x11\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x2A\x11\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x2A\x11\x00\x01\x96\x11\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x2A\x11\x00\x01\x96\x11\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x2A\x11\x00\x00\x08\x01\x00\x00\x08\x01\x00\x00\x00\x0F\x00\x04\x93\x0D\x00\x00\x07\x11\x00\
x00\x00\x0F\x00\x03\x95\x0D\x00\x00\x07\x11\x00\x00\x00\x0F\x00\x00\x04\x0D\x00\x00\x07\x11\x00\x00\x00\x0F\x00\x00\x04\x0D\x00\x00\x04\x11\x00\x00\x00\x0F\x00\x04\x72\x0D\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x00\x25\x0D\x00\x00\x1C\x11\x00\x00\x00\x0F\x00\x00\x25\x0D\x00\x00\xC0\x11\x00\x00\x00\x0F\x00\x00\x25\x0D\x00\x00\x14\x0B\x00\x00\x00\x0F\x00\x00\x25\x0D\x00\x00\x15\x0B\x00\x00\x00\x0F\x00\x00\x25\x0D\x00\x00\x17\x0B\x00\x00\x00\x0F\x00\x00\x25\x0D\x00\x00\x1B\x0B\x00\x00\x00\x0F\x00\x00\x25\x0D\x00\x00\x00\x0F\x00\x00\x69\x0D\x00\x00\x0A\x11\x00\x00\x00\x0F\x00\x00\x69\x0D\x00\x00\x1F\x11\x00\x00\x00\x0F\x00\x00\x69\x0D\x00\x00\x07\x11\x00\x00\x00\x0F\x00\x00\x69\x0D\x00\x00\x04\x11\x00\x00\x00\x0F\x00\x00\x47\x0D\x00\x00\x01\x11\x00\x00\x01\x11\x00\x00\x00\x0F\x00\x00\x47\x0D\x00\x01\x96\x11\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x00\x47\x0D\x00\x00\x8A\x11\x00\x00\x00\x0F\x00\x00\x47\x0D\x00\x00\x8A\x11\x00\x00\x8A\x11\x00\x00\x00\x0F\x00\x00\x47\x0D\x00\x00\x8A\x11\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x00\x47\x0D\x00\x00\x07\x11\x00\x00\x00\x0F\x00\x00\x47\x0D\x00\x00\x07\x11\x00\x04\x8E\x03\x00\x00\x00\x0F\x00\x00\x47\x0D\x00\x00\x07\x11\x00\x00\x25\x11\x00\x00\x00\x0F\x00\x00\x47\x0D\x00\x00\x07\x11\x00\x00\x07\x01\x00\x00\x25\x11\x00\x00\x25\x11\x00\x00\x13\x0B\x00\x00\x00\x0F\x00\x00\x47\x0D\x00\x00\x04\x11\x00\x00\x00\x0F\x00\x00\x47\x0D\x00\x00\x04\x11\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x00\x0F\x00\x00\x47\x0D\x00\x00\x00\x0F\x00\x01\x74\x0D\x00\x00\x07\x11\x00\x00\x00\x0F\x00\x00\x5A\x0D\x00\x00\x0A\x11\x00\x00\x00\x0F\x00\x00\x5A\x0D\x00\x00\x1C\x11\x00\x00\x00\x0F\x00\x00\x5A\x0D\x00\x00\x13\x11\x00\x00\x00\x0F\x00\x00\x5A\x0D\x00\x00\x1F\x11\x00\x00\x00\x0F\x00\x00\x5A\x0D\x00\x00\x07\x11\x00\x00\x00\x0F\x00\x00\x5A\x0D\x00\x00\x04\x11\x00\x00\x00\x0F\x00\x01\x3D\x0D\x00\x00\x01\x11\x00\x00\x00\x0F\x00\x00\x2A\x0D\x00\x00\x0A\x11\x00\x00\xB2\x11\x00\x00\x00\x0F\x00\x00\x2A\x0D\x00\x00\x1C\x11\x00\x00\xB2\x11\x00\x00\x00\x0F\x00\
x00\x2A\x0D\x00\x00\x13\x11\x00\x00\x00\x0F\x00\x00\x2A\x0D\x00\x00\x13\x11\x00\x00\xB2\x11\x00\x00\x00\x0F\x00\x00\x2A\x0D\x00\x00\x1F\x11\x00\x00\xB2\x11\x00\x00\x00\x0F\x00\x00\x2A\x0D\x00\x00\x07\x11\x00\x00\x00\x0F\x00\x00\x2A\x0D\x00\x00\x07\x11\x00\x00\xB2\x11\x00\x00\x00\x0F\x00\x00\x2A\x0D\x00\x00\x04\x11\x00\x00\xB2\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x0A\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x1C\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x1C\x11\x00\x01\x1A\x03\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x1C\x11\x00\x01\x25\x03\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x1C\x11\x00\x01\x3B\x03\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x1C\x11\x00\x01\x41\x03\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\xC0\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\xC0\x11\x00\x00\x01\x0B\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\xC0\x11\x00\x00\x01\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\xC0\x11\x00\x00\x0B\x0B\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\xC0\x11\x00\x00\x0C\x0B\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\xC0\x11\x00\x00\x18\x0B\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\xC0\x11\x00\x00\x25\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x04\x82\x03\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\xC3\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\xC3\x11\x00\x00\x9E\x11\x00\x00\x9E\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\xC3\x11\x00\x00\x0E\x01\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\xC3\x11\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\xC3\x11\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x9E\x11\x00\x00\xD4\x11\x00\x00\xD4\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x04\x87\x03\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x13\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x13\x11\x00\x00\x04\x0B\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x13\x11\x00\x00\x06\x0B\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x13\x11\x00\x00\xC3\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x13\x11\x00\
x00\x9E\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x13\x11\x00\x01\x03\x03\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x13\x11\x00\x01\x07\x03\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x13\x11\x00\x02\xFE\x03\x00\x02\xFF\x03\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x13\x11\x00\x01\x9F\x03\x00\x03\x37\x03\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x13\x11\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x13\x11\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x13\x11\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x13\x11\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x13\x11\x00\x00\x08\x01\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x13\x11\x00\x00\x08\x01\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x13\x11\x00\x00\x08\x01\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x13\x11\x00\x00\x2A\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x13\x11\x00\x00\x2A\x11\x00\x00\x07\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x13\x11\x00\x03\x33\x03\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x04\x8A\x03\x00\x00\xE4\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x04\x8B\x03\x00\x00\xE4\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x04\x8D\x03\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x95\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x95\x11\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x8A\x11\x00\x04\x8C\x03\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x8A\x11\x00\x00\x07\x01\x00\x03\x55\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x1F\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x1F\x11\x00\x01\x1D\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x1F\x11\x00\x00\xC0\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x1F\x11\x00\x04\x82\x03\x00\
x00\x07\x01\x00\x01\x3F\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x1F\x11\x00\x00\xC3\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x1F\x11\x00\x00\x25\x11\x00\x01\x3F\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x07\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x07\x11\x00\x00\xC0\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x07\x11\x00\x00\x12\x0B\x00\x00\x25\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x07\x11\x00\x02\x07\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x07\x11\x00\x02\x0A\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x07\x11\x00\x00\x07\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x07\x11\x00\x00\x07\x11\x00\x00\x2A\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x07\x11\x00\x00\x1A\x0B\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x07\x11\x00\x02\x10\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x07\x11\x00\x00\x25\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x07\x11\x00\x00\x25\x11\x00\x01\x5F\x03\x00\x01\x44\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x07\x11\x00\x00\xD4\x11\x00\x00\xD4\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x07\x11\x00\x00\xD4\x11\x00\x00\xD4\x11\x00\x00\xD4\x11\x00\x00\xD4\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x07\x11\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x07\x11\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x07\x11\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x07\x11\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x04\x94\x03\x00\x00\xE4\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x04\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x04\x11\x00\x02\xA1\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x04\x11\x00\x00\x7D\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x04\x11\x00\x00\x05\x0B\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x04\x11\x00\x01\x1D\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x04\x11\x00\x00\x1C\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x04\x11\x00\x00\xC0\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x04\x11\x00\
x00\x01\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x04\x11\x00\x03\x69\x11\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x04\x11\x00\x03\x69\x11\x00\x00\x07\x01\x00\x01\x3F\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x04\x11\x00\x00\x0D\x0B\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x04\x11\x00\x00\x0E\x0B\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x04\x11\x00\x00\xC3\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x04\x11\x00\x00\x9E\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x04\x11\x00\x00\x0F\x0B\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x04\x11\x00\x04\x87\x03\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x04\x11\x00\x00\x13\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x04\x11\x00\x04\x91\x03\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x04\x11\x00\x00\x07\x11\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x04\x11\x00\x00\x25\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x04\x11\x00\x00\x25\x11\x00\x00\x26\x11\x00\x00\x27\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x04\x11\x00\x00\x25\x11\x00\x01\x3F\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x04\x11\x00\x00\x25\x11\x00\x00\x25\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x04\x11\x00\x00\x25\x11\x00\x00\x07\x01\x00\x03\x69\x11\x00\x00\x07\x01\x00\x04\x96\x03\x00\x00\x07\x01\x00\x00\x1C\x0B\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x04\x11\x00\x00\xD4\x11\x00\x00\xD4\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x04\x11\x00\x00\xD4\x11\x00\x00\xD4\x11\x00\x00\xD4\x11\x00\x00\xD4\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x04\x11\x00\x00\x69\x03\x00\x00\x07\x01\x00\x00\x0E\x01\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x04\x11\x00\x00\x0E\x01\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x04\x11\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x04\x11\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x04\x11\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x04\x11\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\
x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x04\x11\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x0E\x01\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x04\x96\x03\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x2A\x11\x00\x00\x00\x0F\x00\x04\x9D\x0D\x00\x00\x00\x0F\x00\x03\x3E\x0D\x00\x00\x13\x11\x00\x00\x00\x0F\x00\x00\x07\x09\x00\x00\x03\x0B\x00\x00\x02\x09\x00\x00\x08\x09\x00\x00\x09\x09\x00\x00\x08\x0B\x00\x00\x03\x09\x00\x00\x0A\x09\x00\x04\x85\x03\x00\x00\x0B\x09\x00\x00\x10\x0B\x00\x00\x14\x09\x00\x00\x0C\x09\x00\x00\x11\x0B\x00\x02\x07\x03\x00\x02\x0A\x03\x00\x00\x0E\x09\x00\x00\x0F\x09\x00\x00\x0D\x09\x00\x00\x16\x0B\x00\x00\x10\x09\x00\x00\x11\x09\x00\x00\x12\x09\x00\x00\x19\x0B\x00\x02\x10\x03\x00\x00\x06\x09\x00\x00\x04\x09\x00\x00\x05\x09\x00\x00\x13\x09\x00\x00\x02\x01\x00\x00\x00\x09\x00\x00\x01\x09\x00\x00\x04\x01\x00\x00\x00\x01',
_globals = (b'\xFF\xFF\xFF\x0BCAIRO_ANTIALIAS_BEST',6,b'\xFF\xFF\xFF\x0BCAIRO_ANTIALIAS_DEFAULT',0,b'\xFF\xFF\xFF\x0BCAIRO_ANTIALIAS_FAST',4,b'\xFF\xFF\xFF\x0BCAIRO_ANTIALIAS_GOOD',5,b'\xFF\xFF\xFF\x0BCAIRO_ANTIALIAS_GRAY',2,b'\xFF\xFF\xFF\x0BCAIRO_ANTIALIAS_NONE',1,b'\xFF\xFF\xFF\x0BCAIRO_ANTIALIAS_SUBPIXEL',3,b'\xFF\xFF\xFF\x0BCAIRO_CONTENT_ALPHA',8192,b'\xFF\xFF\xFF\x0BCAIRO_CONTENT_COLOR',4096,b'\xFF\xFF\xFF\x0BCAIRO_CONTENT_COLOR_ALPHA',12288,b'\xFF\xFF\xFF\x0BCAIRO_DEVICE_TYPE_COGL',6,b'\xFF\xFF\xFF\x0BCAIRO_DEVICE_TYPE_DRM',0,b'\xFF\xFF\xFF\x0BCAIRO_DEVICE_TYPE_GL',1,b'\xFF\xFF\xFF\x0BCAIRO_DEVICE_TYPE_INVALID',-1,b'\xFF\xFF\xFF\x0BCAIRO_DEVICE_TYPE_SCRIPT',2,b'\xFF\xFF\xFF\x0BCAIRO_DEVICE_TYPE_WIN32',7,b'\xFF\xFF\xFF\x0BCAIRO_DEVICE_TYPE_XCB',3,b'\xFF\xFF\xFF\x0BCAIRO_DEVICE_TYPE_XLIB',4,b'\xFF\xFF\xFF\x0BCAIRO_DEVICE_TYPE_XML',5,b'\xFF\xFF\xFF\x0BCAIRO_EXTEND_NONE',0,b'\xFF\xFF\xFF\x0BCAIRO_EXTEND_PAD',3,b'\xFF\xFF\xFF\x0BCAIRO_EXTEND_REFLECT',2,b'\xFF\xFF\xFF\x0BCAIRO_EXTEND_REPEAT',1,b'\xFF\xFF\xFF\x0BCAIRO_FILL_RULE_EVEN_ODD',1,b'\xFF\xFF\xFF\x0BCAIRO_FILL_RULE_WINDING',0,b'\xFF\xFF\xFF\x0BCAIRO_FILTER_BEST',2,b'\xFF\xFF\xFF\x0BCAIRO_FILTER_BILINEAR',4,b'\xFF\xFF\xFF\x0BCAIRO_FILTER_FAST',0,b'\xFF\xFF\xFF\x0BCAIRO_FILTER_GAUSSIAN',5,b'\xFF\xFF\xFF\x0BCAIRO_FILTER_GOOD',1,b'\xFF\xFF\xFF\x0BCAIRO_FILTER_NEAREST',3,b'\xFF\xFF\xFF\x0BCAIRO_FONT_SLANT_ITALIC',1,b'\xFF\xFF\xFF\x0BCAIRO_FONT_SLANT_NORMAL',0,b'\xFF\xFF\xFF\x0BCAIRO_FONT_SLANT_OBLIQUE',2,b'\xFF\xFF\xFF\x0BCAIRO_FONT_TYPE_FT',1,b'\xFF\xFF\xFF\x0BCAIRO_FONT_TYPE_QUARTZ',3,b'\xFF\xFF\xFF\x0BCAIRO_FONT_TYPE_TOY',0,b'\xFF\xFF\xFF\x0BCAIRO_FONT_TYPE_USER',4,b'\xFF\xFF\xFF\x0BCAIRO_FONT_TYPE_WIN32',2,b'\xFF\xFF\xFF\x0BCAIRO_FONT_WEIGHT_BOLD',1,b'\xFF\xFF\xFF\x0BCAIRO_FONT_WEIGHT_NORMAL',0,b'\xFF\xFF\xFF\x0BCAIRO_FORMAT_A1',3,b'\xFF\xFF\xFF\x0BCAIRO_FORMAT_A8',2,b'\xFF\xFF\xFF\x0BCAIRO_FORMAT_ARGB32',0,b'\xFF\xFF\xFF\x0BCAIRO_FORMAT_INVALID',-1,b'\xFF\xFF\xFF\x0BCAIRO_FORMAT_RGB16_565',4,b'\xFF\xFF\xFF\x
0BCAIRO_FORMAT_RGB24',1,b'\xFF\xFF\xFF\x0BCAIRO_FORMAT_RGB30',5,b'\xFF\xFF\xFF\x0BCAIRO_FORMAT_RGB96F',6,b'\xFF\xFF\xFF\x0BCAIRO_FORMAT_RGBA128F',7,b'\xFF\xFF\xFF\x0BCAIRO_HINT_METRICS_DEFAULT',0,b'\xFF\xFF\xFF\x0BCAIRO_HINT_METRICS_OFF',1,b'\xFF\xFF\xFF\x0BCAIRO_HINT_METRICS_ON',2,b'\xFF\xFF\xFF\x0BCAIRO_HINT_STYLE_DEFAULT',0,b'\xFF\xFF\xFF\x0BCAIRO_HINT_STYLE_FULL',4,b'\xFF\xFF\xFF\x0BCAIRO_HINT_STYLE_MEDIUM',3,b'\xFF\xFF\xFF\x0BCAIRO_HINT_STYLE_NONE',1,b'\xFF\xFF\xFF\x0BCAIRO_HINT_STYLE_SLIGHT',2,b'\xFF\xFF\xFF\x0BCAIRO_LINE_CAP_BUTT',0,b'\xFF\xFF\xFF\x0BCAIRO_LINE_CAP_ROUND',1,b'\xFF\xFF\xFF\x0BCAIRO_LINE_CAP_SQUARE',2,b'\xFF\xFF\xFF\x0BCAIRO_LINE_JOIN_BEVEL',2,b'\xFF\xFF\xFF\x0BCAIRO_LINE_JOIN_MITER',0,b'\xFF\xFF\xFF\x0BCAIRO_LINE_JOIN_ROUND',1,b'\xFF\xFF\xFF\x0BCAIRO_OPERATOR_ADD',12,b'\xFF\xFF\xFF\x0BCAIRO_OPERATOR_ATOP',5,b'\xFF\xFF\xFF\x0BCAIRO_OPERATOR_CLEAR',0,b'\xFF\xFF\xFF\x0BCAIRO_OPERATOR_COLOR_BURN',20,b'\xFF\xFF\xFF\x0BCAIRO_OPERATOR_COLOR_DODGE',19,b'\xFF\xFF\xFF\x0BCAIRO_OPERATOR_DARKEN',17,b'\xFF\xFF\xFF\x0BCAIRO_OPERATOR_DEST',6,b'\xFF\xFF\xFF\x0BCAIRO_OPERATOR_DEST_ATOP',10,b'\xFF\xFF\xFF\x0BCAIRO_OPERATOR_DEST_IN',8,b'\xFF\xFF\xFF\x0BCAIRO_OPERATOR_DEST_OUT',9,b'\xFF\xFF\xFF\x0BCAIRO_OPERATOR_DEST_OVER',7,b'\xFF\xFF\xFF\x0BCAIRO_OPERATOR_DIFFERENCE',23,b'\xFF\xFF\xFF\x0BCAIRO_OPERATOR_EXCLUSION',24,b'\xFF\xFF\xFF\x0BCAIRO_OPERATOR_HARD_LIGHT',21,b'\xFF\xFF\xFF\x0BCAIRO_OPERATOR_HSL_COLOR',27,b'\xFF\xFF\xFF\x0BCAIRO_OPERATOR_HSL_HUE',25,b'\xFF\xFF\xFF\x0BCAIRO_OPERATOR_HSL_LUMINOSITY',28,b'\xFF\xFF\xFF\x0BCAIRO_OPERATOR_HSL_SATURATION',26,b'\xFF\xFF\xFF\x0BCAIRO_OPERATOR_IN',3,b'\xFF\xFF\xFF\x0BCAIRO_OPERATOR_LIGHTEN',18,b'\xFF\xFF\xFF\x0BCAIRO_OPERATOR_MULTIPLY',14,b'\xFF\xFF\xFF\x0BCAIRO_OPERATOR_OUT',4,b'\xFF\xFF\xFF\x0BCAIRO_OPERATOR_OVER',2,b'\xFF\xFF\xFF\x0BCAIRO_OPERATOR_OVERLAY',16,b'\xFF\xFF\xFF\x0BCAIRO_OPERATOR_SATURATE',13,b'\xFF\xFF\xFF\x0BCAIRO_OPERATOR_SCREEN',15,b'\xFF\xFF\xFF\x0BCAIRO_OPERATOR_SOFT_LIGHT',22,b'\xFF\xFF\xFF\x0BC
AIRO_OPERATOR_SOURCE',1,b'\xFF\xFF\xFF\x0BCAIRO_OPERATOR_XOR',11,b'\xFF\xFF\xFF\x0BCAIRO_PATH_CLOSE_PATH',3,b'\xFF\xFF\xFF\x0BCAIRO_PATH_CURVE_TO',2,b'\xFF\xFF\xFF\x0BCAIRO_PATH_LINE_TO',1,b'\xFF\xFF\xFF\x0BCAIRO_PATH_MOVE_TO',0,b'\xFF\xFF\xFF\x0BCAIRO_PATTERN_TYPE_LINEAR',2,b'\xFF\xFF\xFF\x0BCAIRO_PATTERN_TYPE_MESH',4,b'\xFF\xFF\xFF\x0BCAIRO_PATTERN_TYPE_RADIAL',3,b'\xFF\xFF\xFF\x0BCAIRO_PATTERN_TYPE_RASTER_SOURCE',5,b'\xFF\xFF\xFF\x0BCAIRO_PATTERN_TYPE_SOLID',0,b'\xFF\xFF\xFF\x0BCAIRO_PATTERN_TYPE_SURFACE',1,b'\xFF\xFF\xFF\x0BCAIRO_PDF_METADATA_AUTHOR',1,b'\xFF\xFF\xFF\x0BCAIRO_PDF_METADATA_CREATE_DATE',5,b'\xFF\xFF\xFF\x0BCAIRO_PDF_METADATA_CREATOR',4,b'\xFF\xFF\xFF\x0BCAIRO_PDF_METADATA_KEYWORDS',3,b'\xFF\xFF\xFF\x0BCAIRO_PDF_METADATA_MOD_DATE',6,b'\xFF\xFF\xFF\x0BCAIRO_PDF_METADATA_SUBJECT',2,b'\xFF\xFF\xFF\x0BCAIRO_PDF_METADATA_TITLE',0,b'\xFF\xFF\xFF\x0BCAIRO_PDF_OUTLINE_FLAG_BOLD',2,b'\xFF\xFF\xFF\x0BCAIRO_PDF_OUTLINE_FLAG_ITALIC',4,b'\xFF\xFF\xFF\x0BCAIRO_PDF_OUTLINE_FLAG_OPEN',1,b'\xFF\xFF\xFF\x1FCAIRO_PDF_OUTLINE_ROOT',0,b'\xFF\xFF\xFF\x0BCAIRO_PDF_VERSION_1_4',0,b'\xFF\xFF\xFF\x0BCAIRO_PDF_VERSION_1_5',1,b'\xFF\xFF\xFF\x0BCAIRO_PS_LEVEL_2',0,b'\xFF\xFF\xFF\x0BCAIRO_PS_LEVEL_3',1,b'\xFF\xFF\xFF\x0BCAIRO_REGION_OVERLAP_IN',0,b'\xFF\xFF\xFF\x0BCAIRO_REGION_OVERLAP_OUT',1,b'\xFF\xFF\xFF\x0BCAIRO_REGION_OVERLAP_PART',2,b'\xFF\xFF\xFF\x0BCAIRO_STATUS_CLIP_NOT_REPRESENTABLE',22,b'\xFF\xFF\xFF\x0BCAIRO_STATUS_DEVICE_ERROR',35,b'\xFF\xFF\xFF\x0BCAIRO_STATUS_DEVICE_FINISHED',37,b'\xFF\xFF\xFF\x0BCAIRO_STATUS_DEVICE_TYPE_MISMATCH',34,b'\xFF\xFF\xFF\x0BCAIRO_STATUS_FILE_NOT_FOUND',18,b'\xFF\xFF\xFF\x0BCAIRO_STATUS_FONT_TYPE_MISMATCH',25,b'\xFF\xFF\xFF\x0BCAIRO_STATUS_FREETYPE_ERROR',40,b'\xFF\xFF\xFF\x0BCAIRO_STATUS_INVALID_CLUSTERS',29,b'\xFF\xFF\xFF\x0BCAIRO_STATUS_INVALID_CONTENT',15,b'\xFF\xFF\xFF\x0BCAIRO_STATUS_INVALID_DASH',19,b'\xFF\xFF\xFF\x0BCAIRO_STATUS_INVALID_DSC_COMMENT',20,b'\xFF\xFF\xFF\x0BCAIRO_STATUS_INVALID_FORMAT',16,b'\xFF\xFF\xFF\x0BCAIRO_STATU
S_INVALID_INDEX',21,b'\xFF\xFF\xFF\x0BCAIRO_STATUS_INVALID_MATRIX',5,b'\xFF\xFF\xFF\x0BCAIRO_STATUS_INVALID_MESH_CONSTRUCTION',36,b'\xFF\xFF\xFF\x0BCAIRO_STATUS_INVALID_PATH_DATA',9,b'\xFF\xFF\xFF\x0BCAIRO_STATUS_INVALID_POP_GROUP',3,b'\xFF\xFF\xFF\x0BCAIRO_STATUS_INVALID_RESTORE',2,b'\xFF\xFF\xFF\x0BCAIRO_STATUS_INVALID_SIZE',32,b'\xFF\xFF\xFF\x0BCAIRO_STATUS_INVALID_SLANT',30,b'\xFF\xFF\xFF\x0BCAIRO_STATUS_INVALID_STATUS',6,b'\xFF\xFF\xFF\x0BCAIRO_STATUS_INVALID_STRIDE',24,b'\xFF\xFF\xFF\x0BCAIRO_STATUS_INVALID_STRING',8,b'\xFF\xFF\xFF\x0BCAIRO_STATUS_INVALID_VISUAL',17,b'\xFF\xFF\xFF\x0BCAIRO_STATUS_INVALID_WEIGHT',31,b'\xFF\xFF\xFF\x0BCAIRO_STATUS_JBIG2_GLOBAL_MISSING',38,b'\xFF\xFF\xFF\x0BCAIRO_STATUS_LAST_STATUS',43,b'\xFF\xFF\xFF\x0BCAIRO_STATUS_NEGATIVE_COUNT',28,b'\xFF\xFF\xFF\x0BCAIRO_STATUS_NO_CURRENT_POINT',4,b'\xFF\xFF\xFF\x0BCAIRO_STATUS_NO_MEMORY',1,b'\xFF\xFF\xFF\x0BCAIRO_STATUS_NULL_POINTER',7,b'\xFF\xFF\xFF\x0BCAIRO_STATUS_PATTERN_TYPE_MISMATCH',14,b'\xFF\xFF\xFF\x0BCAIRO_STATUS_PNG_ERROR',39,b'\xFF\xFF\xFF\x0BCAIRO_STATUS_READ_ERROR',10,b'\xFF\xFF\xFF\x0BCAIRO_STATUS_SUCCESS',0,b'\xFF\xFF\xFF\x0BCAIRO_STATUS_SURFACE_FINISHED',12,b'\xFF\xFF\xFF\x0BCAIRO_STATUS_SURFACE_TYPE_MISMATCH',13,b'\xFF\xFF\xFF\x0BCAIRO_STATUS_TAG_ERROR',42,b'\xFF\xFF\xFF\x0BCAIRO_STATUS_TEMP_FILE_ERROR',23,b'\xFF\xFF\xFF\x0BCAIRO_STATUS_USER_FONT_ERROR',27,b'\xFF\xFF\xFF\x0BCAIRO_STATUS_USER_FONT_IMMUTABLE',26,b'\xFF\xFF\xFF\x0BCAIRO_STATUS_USER_FONT_NOT_IMPLEMENTED',33,b'\xFF\xFF\xFF\x0BCAIRO_STATUS_WIN32_GDI_ERROR',41,b'\xFF\xFF\xFF\x0BCAIRO_STATUS_WRITE_ERROR',11,b'\xFF\xFF\xFF\x0BCAIRO_SUBPIXEL_ORDER_BGR',2,b'\xFF\xFF\xFF\x0BCAIRO_SUBPIXEL_ORDER_DEFAULT',0,b'\xFF\xFF\xFF\x0BCAIRO_SUBPIXEL_ORDER_RGB',1,b'\xFF\xFF\xFF\x0BCAIRO_SUBPIXEL_ORDER_VBGR',4,b'\xFF\xFF\xFF\x0BCAIRO_SUBPIXEL_ORDER_VRGB',3,b'\xFF\xFF\xFF\x0BCAIRO_SURFACE_OBSERVER_NORMAL',0,b'\xFF\xFF\xFF\x0BCAIRO_SURFACE_OBSERVER_RECORD_OPERATIONS',1,b'\xFF\xFF\xFF\x0BCAIRO_SURFACE_TYPE_BEOS',8,b'\xFF\xFF\xFF\x0BCAIR
O_SURFACE_TYPE_COGL',24,b'\xFF\xFF\xFF\x0BCAIRO_SURFACE_TYPE_DIRECTFB',9,b'\xFF\xFF\xFF\x0BCAIRO_SURFACE_TYPE_DRM',19,b'\xFF\xFF\xFF\x0BCAIRO_SURFACE_TYPE_GL',18,b'\xFF\xFF\xFF\x0BCAIRO_SURFACE_TYPE_GLITZ',5,b'\xFF\xFF\xFF\x0BCAIRO_SURFACE_TYPE_IMAGE',0,b'\xFF\xFF\xFF\x0BCAIRO_SURFACE_TYPE_OS2',11,b'\xFF\xFF\xFF\x0BCAIRO_SURFACE_TYPE_PDF',1,b'\xFF\xFF\xFF\x0BCAIRO_SURFACE_TYPE_PS',2,b'\xFF\xFF\xFF\x0BCAIRO_SURFACE_TYPE_QT',15,b'\xFF\xFF\xFF\x0BCAIRO_SURFACE_TYPE_QUARTZ',6,b'\xFF\xFF\xFF\x0BCAIRO_SURFACE_TYPE_QUARTZ_IMAGE',13,b'\xFF\xFF\xFF\x0BCAIRO_SURFACE_TYPE_RECORDING',16,b'\xFF\xFF\xFF\x0BCAIRO_SURFACE_TYPE_SCRIPT',14,b'\xFF\xFF\xFF\x0BCAIRO_SURFACE_TYPE_SKIA',22,b'\xFF\xFF\xFF\x0BCAIRO_SURFACE_TYPE_SUBSURFACE',23,b'\xFF\xFF\xFF\x0BCAIRO_SURFACE_TYPE_SVG',10,b'\xFF\xFF\xFF\x0BCAIRO_SURFACE_TYPE_TEE',20,b'\xFF\xFF\xFF\x0BCAIRO_SURFACE_TYPE_VG',17,b'\xFF\xFF\xFF\x0BCAIRO_SURFACE_TYPE_WIN32',7,b'\xFF\xFF\xFF\x0BCAIRO_SURFACE_TYPE_WIN32_PRINTING',12,b'\xFF\xFF\xFF\x0BCAIRO_SURFACE_TYPE_XCB',4,b'\xFF\xFF\xFF\x0BCAIRO_SURFACE_TYPE_XLIB',3,b'\xFF\xFF\xFF\x0BCAIRO_SURFACE_TYPE_XML',21,b'\xFF\xFF\xFF\x0BCAIRO_SVG_UNIT_CM',5,b'\xFF\xFF\xFF\x0BCAIRO_SVG_UNIT_EM',1,b'\xFF\xFF\xFF\x0BCAIRO_SVG_UNIT_EX',2,b'\xFF\xFF\xFF\x0BCAIRO_SVG_UNIT_IN',4,b'\xFF\xFF\xFF\x0BCAIRO_SVG_UNIT_MM',6,b'\xFF\xFF\xFF\x0BCAIRO_SVG_UNIT_PC',8,b'\xFF\xFF\xFF\x0BCAIRO_SVG_UNIT_PERCENT',9,b'\xFF\xFF\xFF\x0BCAIRO_SVG_UNIT_PT',7,b'\xFF\xFF\xFF\x0BCAIRO_SVG_UNIT_PX',3,b'\xFF\xFF\xFF\x0BCAIRO_SVG_UNIT_USER',0,b'\xFF\xFF\xFF\x0BCAIRO_SVG_VERSION_1_1',0,b'\xFF\xFF\xFF\x0BCAIRO_SVG_VERSION_1_2',1,b'\xFF\xFF\xFF\x0BCAIRO_TEXT_CLUSTER_FLAG_BACKWARD',1,b'\x00\x04\x08\x23cairo_append_path',0,b'\x00\x04\x60\x23cairo_arc',0,b'\x00\x04\x60\x23cairo_arc_negative',0,b'\x00\x03\xCA\x23cairo_clip',0,b'\x00\x04\x3D\x23cairo_clip_extents',0,b'\x00\x03\xCA\x23cairo_clip_preserve',0,b'\x00\x03\xCA\x23cairo_close_path',0,b'\x00\x00\x86\x23cairo_copy_clip_rectangle_list',0,b'\x00\x03\xCA\x23cairo_copy_page',0,b'\x00\x00\x5C\
x23cairo_copy_path',0,b'\x00\x00\x5C\x23cairo_copy_path_flat',0,b'\x00\x01\xF7\x23cairo_create',0,b'\x00\x04\x68\x23cairo_curve_to',0,b'\x00\x04\x77\x23cairo_debug_reset_static_data',0,b'\x00\x03\xCA\x23cairo_destroy',0,b'\x00\x00\xA8\x23cairo_device_acquire',0,b'\x00\x02\x86\x23cairo_device_destroy',0,b'\x00\x02\x86\x23cairo_device_finish',0,b'\x00\x02\x86\x23cairo_device_flush',0,b'\x00\x02\x53\x23cairo_device_get_reference_count',0,b'\x00\x00\x0F\x23cairo_device_get_type',0,b'\x00\x02\x68\x23cairo_device_get_user_data',0,b'\x00\x02\x14\x23cairo_device_observer_elapsed',0,b'\x00\x02\x14\x23cairo_device_observer_fill_elapsed',0,b'\x00\x02\x14\x23cairo_device_observer_glyphs_elapsed',0,b'\x00\x02\x14\x23cairo_device_observer_mask_elapsed',0,b'\x00\x02\x14\x23cairo_device_observer_paint_elapsed',0,b'\x00\x00\xAB\x23cairo_device_observer_print',0,b'\x00\x02\x14\x23cairo_device_observer_stroke_elapsed',0,b'\x00\x00\x09\x23cairo_device_reference',0,b'\x00\x02\x86\x23cairo_device_release',0,b'\x00\x00\xB0\x23cairo_device_set_user_data',0,b'\x00\x00\xA8\x23cairo_device_status',0,b'\x00\x04\x38\x23cairo_device_to_user',0,b'\x00\x04\x38\x23cairo_device_to_user_distance',0,b'\x00\x03\xCA\x23cairo_fill',0,b'\x00\x04\x3D\x23cairo_fill_extents',0,b'\x00\x03\xCA\x23cairo_fill_preserve',0,b'\x00\x03\xD9\x23cairo_font_extents',0,b'\x00\x02\x89\x23cairo_font_face_destroy',0,b'\x00\x02\x56\x23cairo_font_face_get_reference_count',0,b'\x00\x00\x3A\x23cairo_font_face_get_type',0,b'\x00\x02\x6C\x23cairo_font_face_get_user_data',0,b'\x00\x00\x1B\x23cairo_font_face_reference',0,b'\x00\x00\xB9\x23cairo_font_face_set_user_data',0,b'\x00\x00\xB6\x23cairo_font_face_status',0,b'\x00\x00\x32\x23cairo_font_options_copy',0,b'\x00\x00\x35\x23cairo_font_options_create',0,b'\x00\x02\x9C\x23cairo_font_options_destroy',0,b'\x00\x02\x20\x23cairo_font_options_equal',0,b'\x00\x00\x00\x23cairo_font_options_get_antialias',0,b'\x00\x00\x49\x23cairo_font_options_get_hint_metrics',0,b'\x00\x00\x4C\x23cairo_fo
nt_options_get_hint_style',0,b'\x00\x01\x8E\x23cairo_font_options_get_subpixel_order',0,b'\x00\x02\x03\x23cairo_font_options_get_variations',0,b'\x00\x02\x65\x23cairo_font_options_hash',0,b'\x00\x02\xA3\x23cairo_font_options_merge',0,b'\x00\x02\x9F\x23cairo_font_options_set_antialias',0,b'\x00\x02\xA7\x23cairo_font_options_set_hint_metrics',0,b'\x00\x02\xAB\x23cairo_font_options_set_hint_style',0,b'\x00\x02\xAF\x23cairo_font_options_set_subpixel_order',0,b'\x00\x02\xB3\x23cairo_font_options_set_variations',0,b'\x00\x00\xBF\x23cairo_font_options_status',0,b'\x00\x02\x24\x23cairo_format_stride_for_width',0,b'\x00\x00\x03\x23cairo_get_antialias',0,b'\x00\x04\x38\x23cairo_get_current_point',0,b'\x00\x04\x38\x23cairo_get_dash',0,b'\x00\x02\x46\x23cairo_get_dash_count',0,b'\x00\x00\x15\x23cairo_get_fill_rule',0,b'\x00\x00\x21\x23cairo_get_font_face',0,b'\x00\x03\xFC\x23cairo_get_font_matrix',0,b'\x00\x03\xE1\x23cairo_get_font_options',0,b'\x00\x01\xCD\x23cairo_get_group_target',0,b'\x00\x00\x4F\x23cairo_get_line_cap',0,b'\x00\x00\x52\x23cairo_get_line_join',0,b'\x00\x02\x1D\x23cairo_get_line_width',0,b'\x00\x03\xFC\x23cairo_get_matrix',0,b'\x00\x02\x1D\x23cairo_get_miter_limit',0,b'\x00\x00\x55\x23cairo_get_operator',0,b'\x00\x02\x62\x23cairo_get_reference_count',0,b'\x00\x00\xA5\x23cairo_get_scaled_font',0,b'\x00\x00\x65\x23cairo_get_source',0,b'\x00\x01\xCD\x23cairo_get_target',0,b'\x00\x02\x1D\x23cairo_get_tolerance',0,b'\x00\x02\x82\x23cairo_get_user_data',0,b'\x00\x00\x46\x23cairo_glyph_allocate',0,b'\x00\x03\xEE\x23cairo_glyph_extents',0,b'\x00\x02\xB7\x23cairo_glyph_free',0,b'\x00\x03\xE9\x23cairo_glyph_path',0,b'\x00\x02\x46\x23cairo_has_current_point',0,b'\x00\x03\xCA\x23cairo_identity_matrix',0,b'\x00\x01\x95\x23cairo_image_surface_create',0,b'\x00\x01\xD8\x23cairo_image_surface_create_for_data',0,b'\x00\x01\xD0\x23cairo_image_surface_create_from_png',0,b'\x00\x01\xA5\x23cairo_image_surface_create_from_png_stream',0,b'\x00\x02\x50\x23cairo_image_surface_get_data
',0,b'\x00\x00\x43\x23cairo_image_surface_get_format',0,b'\x00\x02\x34\x23cairo_image_surface_get_height',0,b'\x00\x02\x34\x23cairo_image_surface_get_stride',0,b'\x00\x02\x34\x23cairo_image_surface_get_width',0,b'\x00\x02\x49\x23cairo_in_clip',0,b'\x00\x02\x49\x23cairo_in_fill',0,b'\x00\x02\x49\x23cairo_in_stroke',0,b'\x00\x04\x4E\x23cairo_line_to',0,b'\x00\x04\x0C\x23cairo_mask',0,b'\x00\x04\x14\x23cairo_mask_surface',0,b'\x00\x02\xCB\x23cairo_matrix_init',0,b'\x00\x02\xBA\x23cairo_matrix_init_identity',0,b'\x00\x02\xC2\x23cairo_matrix_init_rotate',0,b'\x00\x02\xC6\x23cairo_matrix_init_scale',0,b'\x00\x02\xC6\x23cairo_matrix_init_translate',0,b'\x00\x00\xC2\x23cairo_matrix_invert',0,b'\x00\x02\xBD\x23cairo_matrix_multiply',0,b'\x00\x02\xC2\x23cairo_matrix_rotate',0,b'\x00\x02\xC6\x23cairo_matrix_scale',0,b'\x00\x02\xD4\x23cairo_matrix_transform_distance',0,b'\x00\x02\xD4\x23cairo_matrix_transform_point',0,b'\x00\x02\xC6\x23cairo_matrix_translate',0,b'\x00\x02\xDC\x23cairo_mesh_pattern_begin_patch',0,b'\x00\x03\x15\x23cairo_mesh_pattern_curve_to',0,b'\x00\x02\xDC\x23cairo_mesh_pattern_end_patch',0,b'\x00\x00\xF3\x23cairo_mesh_pattern_get_control_point',0,b'\x00\x00\xFA\x23cairo_mesh_pattern_get_corner_color_rgba',0,b'\x00\x00\xEF\x23cairo_mesh_pattern_get_patch_count',0,b'\x00\x00\x58\x23cairo_mesh_pattern_get_path',0,b'\x00\x03\x01\x23cairo_mesh_pattern_line_to',0,b'\x00\x03\x01\x23cairo_mesh_pattern_move_to',0,b'\x00\x03\x1E\x23cairo_mesh_pattern_set_control_point',0,b'\x00\x03\x24\x23cairo_mesh_pattern_set_corner_color_rgb',0,b'\x00\x03\x2B\x23cairo_mesh_pattern_set_corner_color_rgba',0,b'\x00\x04\x4E\x23cairo_move_to',0,b'\x00\x03\xCA\x23cairo_new_path',0,b'\x00\x03\xCA\x23cairo_new_sub_path',0,b'\x00\x03\xCA\x23cairo_paint',0,b'\x00\x04\x4A\x23cairo_paint_with_alpha',0,b'\x00\x02\xD9\x23cairo_path_destroy',0,b'\x00\x04\x3D\x23cairo_path_extents',0,b'\x00\x03\x06\x23cairo_pattern_add_color_stop_rgb',0,b'\x00\x03\x0D\x23cairo_pattern_add_color_stop_rgba',0,b'\x00
\x00\x62\x23cairo_pattern_create_for_surface',0,b'\x00\x00\x6D\x23cairo_pattern_create_linear',0,b'\x00\x00\x81\x23cairo_pattern_create_mesh',0,b'\x00\x00\x73\x23cairo_pattern_create_radial',0,b'\x00\x00\x7B\x23cairo_pattern_create_raster_source',0,b'\x00\x00\x68\x23cairo_pattern_create_rgb',0,b'\x00\x00\x6D\x23cairo_pattern_create_rgba',0,b'\x00\x02\xDC\x23cairo_pattern_destroy',0,b'\x00\x00\xE2\x23cairo_pattern_get_color_stop_count',0,b'\x00\x00\xE6\x23cairo_pattern_get_color_stop_rgba',0,b'\x00\x00\x12\x23cairo_pattern_get_extend',0,b'\x00\x00\x18\x23cairo_pattern_get_filter',0,b'\x00\x00\xD2\x23cairo_pattern_get_linear_points',0,b'\x00\x02\xE7\x23cairo_pattern_get_matrix',0,b'\x00\x00\xD9\x23cairo_pattern_get_radial_circles',0,b'\x00\x02\x59\x23cairo_pattern_get_reference_count',0,b'\x00\x00\xD2\x23cairo_pattern_get_rgba',0,b'\x00\x00\xC8\x23cairo_pattern_get_surface',0,b'\x00\x00\x83\x23cairo_pattern_get_type',0,b'\x00\x02\x73\x23cairo_pattern_get_user_data',0,b'\x00\x00\x5F\x23cairo_pattern_reference',0,b'\x00\x02\xDF\x23cairo_pattern_set_extend',0,b'\x00\x02\xE3\x23cairo_pattern_set_filter',0,b'\x00\x02\xEB\x23cairo_pattern_set_matrix',0,b'\x00\x00\xCC\x23cairo_pattern_set_user_data',0,b'\x00\x00\xC5\x23cairo_pattern_status',0,b'\x00\x03\x40\x23cairo_pdf_get_versions',0,b'\x00\x02\x3F\x23cairo_pdf_surface_add_outline',0,b'\x00\x01\xD3\x23cairo_pdf_surface_create',0,b'\x00\x01\xA9\x23cairo_pdf_surface_create_for_stream',0,b'\x00\x03\x82\x23cairo_pdf_surface_restrict_to_version',0,b'\x00\x03\x7D\x23cairo_pdf_surface_set_metadata',0,b'\x00\x03\x9B\x23cairo_pdf_surface_set_page_label',0,b'\x00\x03\xB1\x23cairo_pdf_surface_set_size',0,b'\x00\x03\xBA\x23cairo_pdf_surface_set_thumbnail_size',0,b'\x00\x02\x06\x23cairo_pdf_version_to_string',0,b'\x00\x00\x65\x23cairo_pop_group',0,b'\x00\x03\xCA\x23cairo_pop_group_to_source',0,b'\x00\x03\x44\x23cairo_ps_get_levels',0,b'\x00\x02\x09\x23cairo_ps_level_to_string',0,b'\x00\x01\xD3\x23cairo_ps_surface_create',0,b'\x00\x01\x
A9\x23cairo_ps_surface_create_for_stream',0,b'\x00\x03\x76\x23cairo_ps_surface_dsc_begin_page_setup',0,b'\x00\x03\x76\x23cairo_ps_surface_dsc_begin_setup',0,b'\x00\x03\x9B\x23cairo_ps_surface_dsc_comment',0,b'\x00\x02\x34\x23cairo_ps_surface_get_eps',0,b'\x00\x03\x86\x23cairo_ps_surface_restrict_to_level',0,b'\x00\x03\xB6\x23cairo_ps_surface_set_eps',0,b'\x00\x03\xB1\x23cairo_ps_surface_set_size',0,b'\x00\x03\xCA\x23cairo_push_group',0,b'\x00\x03\xD1\x23cairo_push_group_with_content',0,b'\x00\x00\x29\x23cairo_quartz_font_face_create_for_atsu_font_id',0,b'\x00\x00\x29\x23cairo_quartz_font_face_create_for_cgfont',0,b'\x00\x01\x9A\x23cairo_quartz_surface_create',0,b'\x00\x01\xEC\x23cairo_quartz_surface_create_for_cg_context',0,b'\x00\x02\x7B\x23cairo_quartz_surface_get_cg_context',0,b'\x00\x02\xF7\x23cairo_raster_source_pattern_get_acquire',0,b'\x00\x02\x70\x23cairo_raster_source_pattern_get_callback_data',0,b'\x00\x01\x8B\x23cairo_raster_source_pattern_get_copy',0,b'\x00\x04\x79\x23cairo_raster_source_pattern_get_finish',0,b'\x00\x01\x88\x23cairo_raster_source_pattern_get_snapshot',0,b'\x00\x02\xFC\x23cairo_raster_source_pattern_set_acquire',0,b'\x00\x03\x33\x23cairo_raster_source_pattern_set_callback_data',0,b'\x00\x02\xF3\x23cairo_raster_source_pattern_set_copy',0,b'\x00\x03\x3C\x23cairo_raster_source_pattern_set_finish',0,b'\x00\x02\xEF\x23cairo_raster_source_pattern_set_snapshot',0,b'\x00\x01\x91\x23cairo_recording_surface_create',0,b'\x00\x02\x37\x23cairo_recording_surface_get_extents',0,b'\x00\x03\xAA\x23cairo_recording_surface_ink_extents',0,b'\x00\x04\x59\x23cairo_rectangle',0,b'\x00\x03\x48\x23cairo_rectangle_list_destroy',0,b'\x00\x01\xFA\x23cairo_reference',0,b'\x00\x02\x2F\x23cairo_region_contains_point',0,b'\x00\x00\x89\x23cairo_region_contains_rectangle',0,b'\x00\x00\x97\x23cairo_region_copy',0,b'\x00\x00\x9A\x23cairo_region_create',0,b'\x00\x00\x8D\x23cairo_region_create_rectangle',0,b'\x00\x00\x90\x23cairo_region_create_rectangles',0,b'\x00\x03\x4B\x23
cairo_region_destroy',0,b'\x00\x02\x2B\x23cairo_region_equal',0,b'\x00\x03\x53\x23cairo_region_get_extents',0,b'\x00\x03\x57\x23cairo_region_get_rectangle',0,b'\x00\x01\x10\x23cairo_region_intersect',0,b'\x00\x01\x0C\x23cairo_region_intersect_rectangle',0,b'\x00\x02\x28\x23cairo_region_is_empty',0,b'\x00\x02\x28\x23cairo_region_num_rectangles',0,b'\x00\x00\x94\x23cairo_region_reference',0,b'\x00\x01\x14\x23cairo_region_status',0,b'\x00\x01\x10\x23cairo_region_subtract',0,b'\x00\x01\x0C\x23cairo_region_subtract_rectangle',0,b'\x00\x03\x4E\x23cairo_region_translate',0,b'\x00\x01\x10\x23cairo_region_union',0,b'\x00\x01\x0C\x23cairo_region_union_rectangle',0,b'\x00\x01\x10\x23cairo_region_xor',0,b'\x00\x01\x0C\x23cairo_region_xor_rectangle',0,b'\x00\x04\x68\x23cairo_rel_curve_to',0,b'\x00\x04\x4E\x23cairo_rel_line_to',0,b'\x00\x04\x4E\x23cairo_rel_move_to',0,b'\x00\x03\xCA\x23cairo_reset_clip',0,b'\x00\x03\xCA\x23cairo_restore',0,b'\x00\x04\x4A\x23cairo_rotate',0,b'\x00\x03\xCA\x23cairo_save',0,b'\x00\x04\x4E\x23cairo_scale',0,b'\x00\x00\x9C\x23cairo_scaled_font_create',0,b'\x00\x03\x5C\x23cairo_scaled_font_destroy',0,b'\x00\x03\x5F\x23cairo_scaled_font_extents',0,b'\x00\x03\x6D\x23cairo_scaled_font_get_ctm',0,b'\x00\x00\x1E\x23cairo_scaled_font_get_font_face',0,b'\x00\x03\x6D\x23cairo_scaled_font_get_font_matrix',0,b'\x00\x03\x63\x23cairo_scaled_font_get_font_options',0,b'\x00\x02\x5C\x23cairo_scaled_font_get_reference_count',0,b'\x00\x03\x6D\x23cairo_scaled_font_get_scale_matrix',0,b'\x00\x00\x3D\x23cairo_scaled_font_get_type',0,b'\x00\x02\x77\x23cairo_scaled_font_get_user_data',0,b'\x00\x03\x67\x23cairo_scaled_font_glyph_extents',0,b'\x00\x00\xA2\x23cairo_scaled_font_reference',0,b'\x00\x01\x1F\x23cairo_scaled_font_set_user_data',0,b'\x00\x01\x17\x23cairo_scaled_font_status',0,b'\x00\x03\x71\x23cairo_scaled_font_text_extents',0,b'\x00\x01\x2F\x23cairo_scaled_font_text_to_glyphs',0,b'\x00\x04\x1E\x23cairo_select_font_face',0,b'\x00\x03\xCD\x23cairo_set_antialias',0,b'
\x00\x04\x44\x23cairo_set_dash',0,b'\x00\x03\xD5\x23cairo_set_fill_rule',0,b'\x00\x03\xDD\x23cairo_set_font_face',0,b'\x00\x04\x00\x23cairo_set_font_matrix',0,b'\x00\x03\xE5\x23cairo_set_font_options',0,b'\x00\x04\x4A\x23cairo_set_font_size',0,b'\x00\x03\xF4\x23cairo_set_line_cap',0,b'\x00\x03\xF8\x23cairo_set_line_join',0,b'\x00\x04\x4A\x23cairo_set_line_width',0,b'\x00\x04\x00\x23cairo_set_matrix',0,b'\x00\x04\x4A\x23cairo_set_miter_limit',0,b'\x00\x04\x04\x23cairo_set_operator',0,b'\x00\x04\x10\x23cairo_set_scaled_font',0,b'\x00\x04\x0C\x23cairo_set_source',0,b'\x00\x04\x53\x23cairo_set_source_rgb',0,b'\x00\x04\x59\x23cairo_set_source_rgba',0,b'\x00\x04\x14\x23cairo_set_source_surface',0,b'\x00\x04\x4A\x23cairo_set_tolerance',0,b'\x00\x01\x6C\x23cairo_set_user_data',0,b'\x00\x03\xE9\x23cairo_show_glyphs',0,b'\x00\x03\xCA\x23cairo_show_page',0,b'\x00\x04\x1A\x23cairo_show_text',0,b'\x00\x04\x2E\x23cairo_show_text_glyphs',0,b'\x00\x01\x69\x23cairo_status',0,b'\x00\x02\x0C\x23cairo_status_to_string',0,b'\x00\x03\xCA\x23cairo_stroke',0,b'\x00\x04\x3D\x23cairo_stroke_extents',0,b'\x00\x03\xCA\x23cairo_stroke_preserve',0,b'\x00\x03\x76\x23cairo_surface_copy_page',0,b'\x00\x01\xC6\x23cairo_surface_create_for_rectangle',0,b'\x00\x01\xC2\x23cairo_surface_create_observer',0,b'\x00\x01\xB2\x23cairo_surface_create_similar',0,b'\x00\x01\xB8\x23cairo_surface_create_similar_image',0,b'\x00\x03\x76\x23cairo_surface_destroy',0,b'\x00\x03\x76\x23cairo_surface_finish',0,b'\x00\x03\x76\x23cairo_surface_flush',0,b'\x00\x00\x06\x23cairo_surface_get_content',0,b'\x00\x00\x0C\x23cairo_surface_get_device',0,b'\x00\x03\xA5\x23cairo_surface_get_device_offset',0,b'\x00\x03\xA5\x23cairo_surface_get_device_scale',0,b'\x00\x03\xA5\x23cairo_surface_get_fallback_resolution',0,b'\x00\x03\x79\x23cairo_surface_get_font_options',0,b'\x00\x03\x9F\x23cairo_surface_get_mime_data',0,b'\x00\x02\x5F\x23cairo_surface_get_reference_count',0,b'\x00\x01\xF1\x23cairo_surface_get_type',0,b'\x00\x02\x7E\x23cairo
_surface_get_user_data',0,b'\x00\x02\x34\x23cairo_surface_has_show_text_glyphs',0,b'\x00\x01\xBE\x23cairo_surface_map_to_image',0,b'\x00\x03\x76\x23cairo_surface_mark_dirty',0,b'\x00\x03\xBF\x23cairo_surface_mark_dirty_rectangle',0,b'\x00\x01\x64\x23cairo_surface_observer_add_fill_callback',0,b'\x00\x01\x64\x23cairo_surface_observer_add_finish_callback',0,b'\x00\x01\x64\x23cairo_surface_observer_add_flush_callback',0,b'\x00\x01\x64\x23cairo_surface_observer_add_glyphs_callback',0,b'\x00\x01\x64\x23cairo_surface_observer_add_mask_callback',0,b'\x00\x01\x64\x23cairo_surface_observer_add_paint_callback',0,b'\x00\x01\x64\x23cairo_surface_observer_add_stroke_callback',0,b'\x00\x02\x1A\x23cairo_surface_observer_elapsed',0,b'\x00\x01\x4D\x23cairo_surface_observer_print',0,b'\x00\x01\xAF\x23cairo_surface_reference',0,b'\x00\x03\xB1\x23cairo_surface_set_device_offset',0,b'\x00\x03\xB1\x23cairo_surface_set_device_scale',0,b'\x00\x03\xB1\x23cairo_surface_set_fallback_resolution',0,b'\x00\x01\x5C\x23cairo_surface_set_mime_data',0,b'\x00\x01\x52\x23cairo_surface_set_user_data',0,b'\x00\x03\x76\x23cairo_surface_show_page',0,b'\x00\x01\x4A\x23cairo_surface_status',0,b'\x00\x02\x3B\x23cairo_surface_supports_mime_type',0,b'\x00\x03\x8A\x23cairo_surface_unmap_image',0,b'\x00\x01\x58\x23cairo_surface_write_to_png',0,b'\x00\x01\x4D\x23cairo_surface_write_to_png_stream',0,b'\x00\x03\xC6\x23cairo_svg_get_versions',0,b'\x00\x01\xD3\x23cairo_svg_surface_create',0,b'\x00\x01\xA9\x23cairo_svg_surface_create_for_stream',0,b'\x00\x01\xF4\x23cairo_svg_surface_get_document_unit',0,b'\x00\x03\x97\x23cairo_svg_surface_restrict_to_version',0,b'\x00\x03\x93\x23cairo_svg_surface_set_document_unit',0,b'\x00\x02\x0F\x23cairo_svg_version_to_string',0,b'\x00\x04\x29\x23cairo_tag_begin',0,b'\x00\x04\x1A\x23cairo_tag_end',0,b'\x00\x01\xFD\x23cairo_text_cluster_allocate',0,b'\x00\x04\x71\x23cairo_text_cluster_free',0,b'\x00\x04\x24\x23cairo_text_extents',0,b'\x00\x04\x1A\x23cairo_text_path',0,b'\x00\x00\x24
\x23cairo_toy_font_face_create',0,b'\x00\x02\x00\x23cairo_toy_font_face_get_family',0,b'\x00\x00\x37\x23cairo_toy_font_face_get_slant',0,b'\x00\x00\x40\x23cairo_toy_font_face_get_weight',0,b'\x00\x04\x00\x23cairo_transform',0,b'\x00\x04\x4E\x23cairo_translate',0,b'\x00\x00\x30\x23cairo_user_font_face_create',0,b'\x00\x01\x7C\x23cairo_user_font_face_get_init_func',0,b'\x00\x01\x82\x23cairo_user_font_face_get_render_glyph_func',0,b'\x00\x01\x7F\x23cairo_user_font_face_get_text_to_glyphs_func',0,b'\x00\x01\x85\x23cairo_user_font_face_get_unicode_to_glyph_func',0,b'\x00\x02\x8C\x23cairo_user_font_face_set_init_func',0,b'\x00\x02\x94\x23cairo_user_font_face_set_render_glyph_func',0,b'\x00\x02\x90\x23cairo_user_font_face_set_text_to_glyphs_func',0,b'\x00\x02\x98\x23cairo_user_font_face_set_unicode_to_glyph_func',0,b'\x00\x04\x38\x23cairo_user_to_device',0,b'\x00\x04\x38\x23cairo_user_to_device_distance',0,b'\x00\x02\x4E\x23cairo_version',0,b'\x00\x02\x12\x23cairo_version_string',0,b'\x00\x00\x29\x23cairo_win32_font_face_create_for_hfont',0,b'\x00\x00\x29\x23cairo_win32_font_face_create_for_logfontw',0,b'\x00\x00\x2C\x23cairo_win32_font_face_create_for_logfontw_hfont',0,b'\x00\x01\xDF\x23cairo_win32_printing_surface_create',0,b'\x00\x03\x5C\x23cairo_win32_scaled_font_done_font',0,b'\x00\x03\x6D\x23cairo_win32_scaled_font_get_device_to_logical',0,b'\x00\x03\x6D\x23cairo_win32_scaled_font_get_logical_to_device',0,b'\x00\x02\x17\x23cairo_win32_scaled_font_get_metrics_factor',0,b'\x00\x01\x46\x23cairo_win32_scaled_font_select_font',0,b'\x00\x01\xDF\x23cairo_win32_surface_create',0,b'\x00\x01\xE6\x23cairo_win32_surface_create_with_ddb',0,b'\x00\x01\x95\x23cairo_win32_surface_create_with_dib',0,b'\x00\x01\xE2\x23cairo_win32_surface_create_with_format',0,b'\x00\x02\x7B\x23cairo_win32_surface_get_dc',0,b'\x00\x01\xAF\x23cairo_win32_surface_get_image',0),
_struct_unions = ((b'\x00\x00\x04\x9A\x00\x00\x00\x02$1',b'\x00\x04\x86\x11type',b'\x00\x00\x47\x11length'),(b'\x00\x00\x04\x9B\x00\x00\x00\x02$2',b'\x00\x00\x69\x11x',b'\x00\x00\x69\x11y'),(b'\x00\x00\x04\x7E\x00\x00\x00\x02$cairo_font_extents_t',b'\x00\x00\x69\x11ascent',b'\x00\x00\x69\x11descent',b'\x00\x00\x69\x11height',b'\x00\x00\x69\x11max_x_advance',b'\x00\x00\x69\x11max_y_advance'),(b'\x00\x00\x04\x82\x00\x00\x00\x02$cairo_glyph_t',b'\x00\x01\x3D\x11index',b'\x00\x00\x69\x11x',b'\x00\x00\x69\x11y'),(b'\x00\x00\x04\x96\x00\x00\x00\x02$cairo_text_cluster_t',b'\x00\x00\x47\x11num_bytes',b'\x00\x00\x47\x11num_glyphs'),(b'\x00\x00\x04\x97\x00\x00\x00\x02$cairo_text_extents_t',b'\x00\x00\x69\x11x_bearing',b'\x00\x00\x69\x11y_bearing',b'\x00\x00\x69\x11width',b'\x00\x00\x69\x11height',b'\x00\x00\x69\x11x_advance',b'\x00\x00\x69\x11y_advance'),(b'\x00\x00\x04\x95\x00\x00\x00\x10_cairo',),(b'\x00\x00\x04\x7C\x00\x00\x00\x10_cairo_device',),(b'\x00\x00\x04\x7F\x00\x00\x00\x10_cairo_font_face',),(b'\x00\x00\x04\x80\x00\x00\x00\x10_cairo_font_options',),(b'\x00\x00\x04\x83\x00\x00\x00\x02_cairo_matrix',b'\x00\x00\x69\x11xx',b'\x00\x00\x69\x11yx',b'\x00\x00\x69\x11xy',b'\x00\x00\x69\x11yy',b'\x00\x00\x69\x11x0',b'\x00\x00\x69\x11y0'),(b'\x00\x00\x04\x85\x00\x00\x00\x03_cairo_path_data_t',b'\x00\x04\x9A\x11header',b'\x00\x04\x9B\x11point'),(b'\x00\x00\x04\x88\x00\x00\x00\x10_cairo_pattern',),(b'\x00\x00\x04\x8E\x00\x00\x00\x02_cairo_rectangle',b'\x00\x00\x69\x11x',b'\x00\x00\x69\x11y',b'\x00\x00\x69\x11width',b'\x00\x00\x69\x11height'),(b'\x00\x00\x04\x8C\x00\x00\x00\x02_cairo_rectangle_int',b'\x00\x00\x47\x11x',b'\x00\x00\x47\x11y',b'\x00\x00\x47\x11width',b'\x00\x00\x47\x11height'),(b'\x00\x00\x04\x8D\x00\x00\x00\x02_cairo_rectangle_list',b'\x00\x02\x0D\x11status',b'\x00\x02\x39\x11rectangles',b'\x00\x00\x47\x11num_rectangles'),(b'\x00\x00\x04\x90\x00\x00\x00\x10_cairo_region',),(b'\x00\x00\x04\x91\x00\x00\x00\x10_cairo_scaled_font',),(b'\x00\x00\x04\x92\x00\x00\x00\x1
0_cairo_surface',),(b'\x00\x00\x04\x98\x00\x00\x00\x02_cairo_user_data_key',b'\x00\x00\x47\x11unused'),(b'\x00\x00\x04\x87\x00\x00\x00\x02cairo_path',b'\x00\x02\x0D\x11status',b'\x00\x04\x84\x11data',b'\x00\x00\x47\x11num_data')),
_enums = (b'\x00\x00\x01\xC4\x00\x00\x00\x16$cairo_surface_observer_mode_t\x00CAIRO_SURFACE_OBSERVER_NORMAL,CAIRO_SURFACE_OBSERVER_RECORD_OPERATIONS',b'\x00\x00\x02\xA1\x00\x00\x00\x16_cairo_antialias\x00CAIRO_ANTIALIAS_DEFAULT,CAIRO_ANTIALIAS_NONE,CAIRO_ANTIALIAS_GRAY,CAIRO_ANTIALIAS_SUBPIXEL,CAIRO_ANTIALIAS_FAST,CAIRO_ANTIALIAS_GOOD,CAIRO_ANTIALIAS_BEST',b'\x00\x00\x00\x7D\x00\x00\x00\x16_cairo_content\x00CAIRO_CONTENT_COLOR,CAIRO_CONTENT_ALPHA,CAIRO_CONTENT_COLOR_ALPHA',b'\x00\x00\x04\x7D\x00\x00\x00\x15_cairo_device_type\x00CAIRO_DEVICE_TYPE_DRM,CAIRO_DEVICE_TYPE_GL,CAIRO_DEVICE_TYPE_SCRIPT,CAIRO_DEVICE_TYPE_XCB,CAIRO_DEVICE_TYPE_XLIB,CAIRO_DEVICE_TYPE_XML,CAIRO_DEVICE_TYPE_COGL,CAIRO_DEVICE_TYPE_WIN32,CAIRO_DEVICE_TYPE_INVALID',b'\x00\x00\x02\xE1\x00\x00\x00\x16_cairo_extend\x00CAIRO_EXTEND_NONE,CAIRO_EXTEND_REPEAT,CAIRO_EXTEND_REFLECT,CAIRO_EXTEND_PAD',b'\x00\x00\x03\xD7\x00\x00\x00\x16_cairo_fill_rule\x00CAIRO_FILL_RULE_WINDING,CAIRO_FILL_RULE_EVEN_ODD',b'\x00\x00\x02\xE5\x00\x00\x00\x16_cairo_filter\x00CAIRO_FILTER_FAST,CAIRO_FILTER_GOOD,CAIRO_FILTER_BEST,CAIRO_FILTER_NEAREST,CAIRO_FILTER_BILINEAR,CAIRO_FILTER_GAUSSIAN',b'\x00\x00\x00\x26\x00\x00\x00\x16_cairo_font_slant\x00CAIRO_FONT_SLANT_NORMAL,CAIRO_FONT_SLANT_ITALIC,CAIRO_FONT_SLANT_OBLIQUE',b'\x00\x00\x04\x81\x00\x00\x00\x16_cairo_font_type\x00CAIRO_FONT_TYPE_TOY,CAIRO_FONT_TYPE_FT,CAIRO_FONT_TYPE_WIN32,CAIRO_FONT_TYPE_QUARTZ,CAIRO_FONT_TYPE_USER',b'\x00\x00\x00\x27\x00\x00\x00\x16_cairo_font_weight\x00CAIRO_FONT_WEIGHT_NORMAL,CAIRO_FONT_WEIGHT_BOLD',b'\x00\x00\x01\x96\x00\x00\x00\x15_cairo_format\x00CAIRO_FORMAT_INVALID,CAIRO_FORMAT_ARGB32,CAIRO_FORMAT_RGB24,CAIRO_FORMAT_A8,CAIRO_FORMAT_A1,CAIRO_FORMAT_RGB16_565,CAIRO_FORMAT_RGB30,CAIRO_FORMAT_RGB96F,CAIRO_FORMAT_RGBA128F',b'\x00\x00\x02\xA9\x00\x00\x00\x16_cairo_hint_metrics\x00CAIRO_HINT_METRICS_DEFAULT,CAIRO_HINT_METRICS_OFF,CAIRO_HINT_METRICS_ON',b'\x00\x00\x02\xAD\x00\x00\x00\x16_cairo_hint_style\x00CAIRO_HINT_STYLE_DEFAULT,CAIRO_HINT_STYLE_NONE,
CAIRO_HINT_STYLE_SLIGHT,CAIRO_HINT_STYLE_MEDIUM,CAIRO_HINT_STYLE_FULL',b'\x00\x00\x03\xF6\x00\x00\x00\x16_cairo_line_cap\x00CAIRO_LINE_CAP_BUTT,CAIRO_LINE_CAP_ROUND,CAIRO_LINE_CAP_SQUARE',b'\x00\x00\x03\xFA\x00\x00\x00\x16_cairo_line_join\x00CAIRO_LINE_JOIN_MITER,CAIRO_LINE_JOIN_ROUND,CAIRO_LINE_JOIN_BEVEL',b'\x00\x00\x04\x06\x00\x00\x00\x16_cairo_operator\x00CAIRO_OPERATOR_CLEAR,CAIRO_OPERATOR_SOURCE,CAIRO_OPERATOR_OVER,CAIRO_OPERATOR_IN,CAIRO_OPERATOR_OUT,CAIRO_OPERATOR_ATOP,CAIRO_OPERATOR_DEST,CAIRO_OPERATOR_DEST_OVER,CAIRO_OPERATOR_DEST_IN,CAIRO_OPERATOR_DEST_OUT,CAIRO_OPERATOR_DEST_ATOP,CAIRO_OPERATOR_XOR,CAIRO_OPERATOR_ADD,CAIRO_OPERATOR_SATURATE,CAIRO_OPERATOR_MULTIPLY,CAIRO_OPERATOR_SCREEN,CAIRO_OPERATOR_OVERLAY,CAIRO_OPERATOR_DARKEN,CAIRO_OPERATOR_LIGHTEN,CAIRO_OPERATOR_COLOR_DODGE,CAIRO_OPERATOR_COLOR_BURN,CAIRO_OPERATOR_HARD_LIGHT,CAIRO_OPERATOR_SOFT_LIGHT,CAIRO_OPERATOR_DIFFERENCE,CAIRO_OPERATOR_EXCLUSION,CAIRO_OPERATOR_HSL_HUE,CAIRO_OPERATOR_HSL_SATURATION,CAIRO_OPERATOR_HSL_COLOR,CAIRO_OPERATOR_HSL_LUMINOSITY',b'\x00\x00\x04\x86\x00\x00\x00\x16_cairo_path_data_type\x00CAIRO_PATH_MOVE_TO,CAIRO_PATH_LINE_TO,CAIRO_PATH_CURVE_TO,CAIRO_PATH_CLOSE_PATH',b'\x00\x00\x04\x89\x00\x00\x00\x16_cairo_pattern_type\x00CAIRO_PATTERN_TYPE_SOLID,CAIRO_PATTERN_TYPE_SURFACE,CAIRO_PATTERN_TYPE_LINEAR,CAIRO_PATTERN_TYPE_RADIAL,CAIRO_PATTERN_TYPE_MESH,CAIRO_PATTERN_TYPE_RASTER_SOURCE',b'\x00\x00\x03\x7F\x00\x00\x00\x16_cairo_pdf_metadata\x00CAIRO_PDF_METADATA_TITLE,CAIRO_PDF_METADATA_AUTHOR,CAIRO_PDF_METADATA_SUBJECT,CAIRO_PDF_METADATA_KEYWORDS,CAIRO_PDF_METADATA_CREATOR,CAIRO_PDF_METADATA_CREATE_DATE,CAIRO_PDF_METADATA_MOD_DATE',b'\x00\x00\x02\x44\x00\x00\x00\x16_cairo_pdf_outline_flags\x00CAIRO_PDF_OUTLINE_FLAG_OPEN,CAIRO_PDF_OUTLINE_FLAG_BOLD,CAIRO_PDF_OUTLINE_FLAG_ITALIC',b'\x00\x00\x02\x07\x00\x00\x00\x16_cairo_pdf_version\x00CAIRO_PDF_VERSION_1_4,CAIRO_PDF_VERSION_1_5',b'\x00\x00\x02\x0A\x00\x00\x00\x16_cairo_ps_level\x00CAIRO_PS_LEVEL_2,CAIRO_PS_LEVEL_3',b'\x00\x00\x0
4\x8F\x00\x00\x00\x16_cairo_region_overlap\x00CAIRO_REGION_OVERLAP_IN,CAIRO_REGION_OVERLAP_OUT,CAIRO_REGION_OVERLAP_PART',b'\x00\x00\x02\x0D\x00\x00\x00\x16_cairo_status\x00CAIRO_STATUS_SUCCESS,CAIRO_STATUS_NO_MEMORY,CAIRO_STATUS_INVALID_RESTORE,CAIRO_STATUS_INVALID_POP_GROUP,CAIRO_STATUS_NO_CURRENT_POINT,CAIRO_STATUS_INVALID_MATRIX,CAIRO_STATUS_INVALID_STATUS,CAIRO_STATUS_NULL_POINTER,CAIRO_STATUS_INVALID_STRING,CAIRO_STATUS_INVALID_PATH_DATA,CAIRO_STATUS_READ_ERROR,CAIRO_STATUS_WRITE_ERROR,CAIRO_STATUS_SURFACE_FINISHED,CAIRO_STATUS_SURFACE_TYPE_MISMATCH,CAIRO_STATUS_PATTERN_TYPE_MISMATCH,CAIRO_STATUS_INVALID_CONTENT,CAIRO_STATUS_INVALID_FORMAT,CAIRO_STATUS_INVALID_VISUAL,CAIRO_STATUS_FILE_NOT_FOUND,CAIRO_STATUS_INVALID_DASH,CAIRO_STATUS_INVALID_DSC_COMMENT,CAIRO_STATUS_INVALID_INDEX,CAIRO_STATUS_CLIP_NOT_REPRESENTABLE,CAIRO_STATUS_TEMP_FILE_ERROR,CAIRO_STATUS_INVALID_STRIDE,CAIRO_STATUS_FONT_TYPE_MISMATCH,CAIRO_STATUS_USER_FONT_IMMUTABLE,CAIRO_STATUS_USER_FONT_ERROR,CAIRO_STATUS_NEGATIVE_COUNT,CAIRO_STATUS_INVALID_CLUSTERS,CAIRO_STATUS_INVALID_SLANT,CAIRO_STATUS_INVALID_WEIGHT,CAIRO_STATUS_INVALID_SIZE,CAIRO_STATUS_USER_FONT_NOT_IMPLEMENTED,CAIRO_STATUS_DEVICE_TYPE_MISMATCH,CAIRO_STATUS_DEVICE_ERROR,CAIRO_STATUS_INVALID_MESH_CONSTRUCTION,CAIRO_STATUS_DEVICE_FINISHED,CAIRO_STATUS_JBIG2_GLOBAL_MISSING,CAIRO_STATUS_PNG_ERROR,CAIRO_STATUS_FREETYPE_ERROR,CAIRO_STATUS_WIN32_GDI_ERROR,CAIRO_STATUS_TAG_ERROR,CAIRO_STATUS_LAST_STATUS',b'\x00\x00\x02\xB1\x00\x00\x00\x16_cairo_subpixel_order\x00CAIRO_SUBPIXEL_ORDER_DEFAULT,CAIRO_SUBPIXEL_ORDER_RGB,CAIRO_SUBPIXEL_ORDER_BGR,CAIRO_SUBPIXEL_ORDER_VRGB,CAIRO_SUBPIXEL_ORDER_VBGR',b'\x00\x00\x04\x93\x00\x00\x00\x16_cairo_surface_type\x00CAIRO_SURFACE_TYPE_IMAGE,CAIRO_SURFACE_TYPE_PDF,CAIRO_SURFACE_TYPE_PS,CAIRO_SURFACE_TYPE_XLIB,CAIRO_SURFACE_TYPE_XCB,CAIRO_SURFACE_TYPE_GLITZ,CAIRO_SURFACE_TYPE_QUARTZ,CAIRO_SURFACE_TYPE_WIN32,CAIRO_SURFACE_TYPE_BEOS,CAIRO_SURFACE_TYPE_DIRECTFB,CAIRO_SURFACE_TYPE_SVG,CAIRO_SURFACE_TYPE_OS2,CAIRO_SUR
FACE_TYPE_WIN32_PRINTING,CAIRO_SURFACE_TYPE_QUARTZ_IMAGE,CAIRO_SURFACE_TYPE_SCRIPT,CAIRO_SURFACE_TYPE_QT,CAIRO_SURFACE_TYPE_RECORDING,CAIRO_SURFACE_TYPE_VG,CAIRO_SURFACE_TYPE_GL,CAIRO_SURFACE_TYPE_DRM,CAIRO_SURFACE_TYPE_TEE,CAIRO_SURFACE_TYPE_XML,CAIRO_SURFACE_TYPE_SKIA,CAIRO_SURFACE_TYPE_SUBSURFACE,CAIRO_SURFACE_TYPE_COGL',b'\x00\x00\x03\x95\x00\x00\x00\x16_cairo_svg_unit\x00CAIRO_SVG_UNIT_USER,CAIRO_SVG_UNIT_EM,CAIRO_SVG_UNIT_EX,CAIRO_SVG_UNIT_PX,CAIRO_SVG_UNIT_IN,CAIRO_SVG_UNIT_CM,CAIRO_SVG_UNIT_MM,CAIRO_SVG_UNIT_PT,CAIRO_SVG_UNIT_PC,CAIRO_SVG_UNIT_PERCENT',b'\x00\x00\x02\x10\x00\x00\x00\x16_cairo_svg_version\x00CAIRO_SVG_VERSION_1_1,CAIRO_SVG_VERSION_1_2',b'\x00\x00\x04\x36\x00\x00\x00\x16_cairo_text_cluster_flags\x00CAIRO_TEXT_CLUSTER_FLAG_BACKWARD'),
_typenames = (b'\x00\x00\x00\x2AATSUFontID',b'\x00\x00\x00\x2ACGContextRef',b'\x00\x00\x00\x2ACGFontRef',b'\x00\x00\x00\x2AHDC',b'\x00\x00\x00\x2AHFONT',b'\x00\x00\x04\x9DLOGFONTW',b'\x00\x00\x02\xA1cairo_antialias_t',b'\x00\x00\x00\x47cairo_bool_t',b'\x00\x00\x00\x7Dcairo_content_t',b'\x00\x00\x00\xB4cairo_destroy_func_t',b'\x00\x00\x04\x7Ccairo_device_t',b'\x00\x00\x04\x7Dcairo_device_type_t',b'\x00\x00\x02\xE1cairo_extend_t',b'\x00\x00\x03\xD7cairo_fill_rule_t',b'\x00\x00\x02\xE5cairo_filter_t',b'\x00\x00\x04\x7Ecairo_font_extents_t',b'\x00\x00\x04\x7Fcairo_font_face_t',b'\x00\x00\x04\x80cairo_font_options_t',b'\x00\x00\x00\x26cairo_font_slant_t',b'\x00\x00\x04\x81cairo_font_type_t',b'\x00\x00\x00\x27cairo_font_weight_t',b'\x00\x00\x01\x96cairo_format_t',b'\x00\x00\x04\x82cairo_glyph_t',b'\x00\x00\x02\xA9cairo_hint_metrics_t',b'\x00\x00\x02\xADcairo_hint_style_t',b'\x00\x00\x03\xF6cairo_line_cap_t',b'\x00\x00\x03\xFAcairo_line_join_t',b'\x00\x00\x04\x83cairo_matrix_t',b'\x00\x00\x04\x06cairo_operator_t',b'\x00\x00\x04\x85cairo_path_data_t',b'\x00\x00\x04\x86cairo_path_data_type_t',b'\x00\x00\x04\x87cairo_path_t',b'\x00\x00\x04\x88cairo_pattern_t',b'\x00\x00\x04\x89cairo_pattern_type_t',b'\x00\x00\x03\x7Fcairo_pdf_metadata_t',b'\x00\x00\x02\x44cairo_pdf_outline_flags_t',b'\x00\x00\x02\x07cairo_pdf_version_t',b'\x00\x00\x02\x0Acairo_ps_level_t',b'\x00\x00\x02\xFEcairo_raster_source_acquire_func_t',b'\x00\x00\x02\xF5cairo_raster_source_copy_func_t',b'\x00\x00\x03\x3Ecairo_raster_source_finish_func_t',b'\x00\x00\x02\xFFcairo_raster_source_release_func_t',b'\x00\x00\x02\xF1cairo_raster_source_snapshot_func_t',b'\x00\x00\x01\xA6cairo_read_func_t',b'\x00\x00\x04\x8Ccairo_rectangle_int_t',b'\x00\x00\x04\x8Dcairo_rectangle_list_t',b'\x00\x00\x04\x8Ecairo_rectangle_t',b'\x00\x00\x04\x8Fcairo_region_overlap_t',b'\x00\x00\x04\x90cairo_region_t',b'\x00\x00\x04\x91cairo_scaled_font_t',b'\x00\x00\x02\x0Dcairo_status_t',b'\x00\x00\x02\xB1cairo_subpixel_order_t',b'\x00\x00\x01\x6
6cairo_surface_observer_callback_t',b'\x00\x00\x01\xC4cairo_surface_observer_mode_t',b'\x00\x00\x04\x92cairo_surface_t',b'\x00\x00\x04\x93cairo_surface_type_t',b'\x00\x00\x03\x95cairo_svg_unit_t',b'\x00\x00\x02\x10cairo_svg_version_t',b'\x00\x00\x04\x95cairo_t',b'\x00\x00\x04\x36cairo_text_cluster_flags_t',b'\x00\x00\x04\x96cairo_text_cluster_t',b'\x00\x00\x04\x97cairo_text_extents_t',b'\x00\x00\x04\x98cairo_user_data_key_t',b'\x00\x00\x02\x8Ecairo_user_scaled_font_init_func_t',b'\x00\x00\x02\x96cairo_user_scaled_font_render_glyph_func_t',b'\x00\x00\x02\x92cairo_user_scaled_font_text_to_glyphs_func_t',b'\x00\x00\x02\x9Acairo_user_scaled_font_unicode_to_glyph_func_t',b'\x00\x00\x00\xADcairo_write_func_t'),
)
| 4,887.666667
| 27,875
| 0.810475
| 12,391
| 58,652
| 3.602453
| 0.056573
| 0.205923
| 0.042341
| 0.072315
| 0.681348
| 0.592253
| 0.497558
| 0.366795
| 0.325239
| 0.304651
| 0
| 0.235392
| 0.000887
| 58,652
| 11
| 27,876
| 5,332
| 0.526348
| 0.000324
| 0
| 0
| 1
| 2.666667
| 0.92359
| 0.918422
| 0
| 1
| 0.000102
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.111111
| 0.111111
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
fb4d8ab9055ab5f5d1ff4559b3a2115332078cc7
| 144
|
py
|
Python
|
platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/viper/calculators/calc_white.py
|
PascalGuenther/gecko_sdk
|
2e82050dc8823c9fe0e8908c1b2666fb83056230
|
[
"Zlib"
] | 82
|
2016-06-29T17:24:43.000Z
|
2021-04-16T06:49:17.000Z
|
platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/viper/calculators/calc_white.py
|
PascalGuenther/gecko_sdk
|
2e82050dc8823c9fe0e8908c1b2666fb83056230
|
[
"Zlib"
] | 6
|
2022-01-12T18:22:08.000Z
|
2022-03-25T10:19:27.000Z
|
platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/viper/calculators/calc_white.py
|
PascalGuenther/gecko_sdk
|
2e82050dc8823c9fe0e8908c1b2666fb83056230
|
[
"Zlib"
] | 56
|
2016-08-02T10:50:50.000Z
|
2021-07-19T08:57:34.000Z
|
from pyradioconfig.parts.bobcat.calculators.calc_white import Calc_Whitening_Bobcat
class calc_whitening_viper(Calc_Whitening_Bobcat):
pass
| 36
| 83
| 0.875
| 19
| 144
| 6.263158
| 0.631579
| 0.327731
| 0.319328
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076389
| 144
| 4
| 84
| 36
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 8
|
fb4e021401f93d38e219cfc6249991af3752f8d4
| 129
|
py
|
Python
|
tests/test_clmutils.py
|
ffreemt/colab-misc-utils
|
c241aeccbeb529ff90cfaf3957f25530f5e7b8aa
|
[
"MIT"
] | null | null | null |
tests/test_clmutils.py
|
ffreemt/colab-misc-utils
|
c241aeccbeb529ff90cfaf3957f25530f5e7b8aa
|
[
"MIT"
] | 1
|
2021-01-18T15:20:25.000Z
|
2021-02-08T07:08:34.000Z
|
tests/test_clmutils.py
|
ffreemt/colab-misc-utils
|
c241aeccbeb529ff90cfaf3957f25530f5e7b8aa
|
[
"MIT"
] | 2
|
2020-12-19T03:46:35.000Z
|
2020-12-31T02:15:20.000Z
|
"""Test."""
from clmutils import __version__
def test_version():
"""Test version."""
assert __version__[:-1] == '0.1.'
| 16.125
| 37
| 0.612403
| 15
| 129
| 4.666667
| 0.6
| 0.314286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.028302
| 0.178295
| 129
| 7
| 38
| 18.428571
| 0.632075
| 0.147287
| 0
| 0
| 0
| 0
| 0.040404
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
83886156c828d15c5b069b4b130b1b56e3c4a8fc
| 3,876
|
py
|
Python
|
tests/test_class_oelint_spaces_linebeginning.py
|
skycaptain/oelint-adv
|
ff67d3149cf8b1de2b0b2d158a68f4e2cf5e9e46
|
[
"BSD-2-Clause"
] | null | null | null |
tests/test_class_oelint_spaces_linebeginning.py
|
skycaptain/oelint-adv
|
ff67d3149cf8b1de2b0b2d158a68f4e2cf5e9e46
|
[
"BSD-2-Clause"
] | null | null | null |
tests/test_class_oelint_spaces_linebeginning.py
|
skycaptain/oelint-adv
|
ff67d3149cf8b1de2b0b2d158a68f4e2cf5e9e46
|
[
"BSD-2-Clause"
] | null | null | null |
import pytest
from .base import TestBaseClass
class TestClassOelintSpacesLineBeginning(TestBaseClass):
@pytest.mark.parametrize('id', ['oelint.spaces.linebeginning'])
@pytest.mark.parametrize('occurrence', [1])
@pytest.mark.parametrize('input',
[
{
'oelint_adv_test.bb':
'''
A = "1"
ABC = "1"
''',
},
{
'oelint_adv_test.bb':
'''
A = "1"
ABC = "1"
''',
},
],
)
def test_bad(self, input, id, occurrence):
self.check_for_id(self._create_args(input), id, occurrence)
@pytest.mark.parametrize('id', ['oelint.spaces.linebeginning'])
@pytest.mark.parametrize('input',
[
{
'oelint_adv_test.bb':
'''
A = "1"
ABC = "1"
''',
},
{
'oelint_adv_test.bb':
'''
A = "1"
ABC = "1"
''',
},
{
'oelint_adv_test.bb':
'''
def foo():
return "abc"
''',
},
{
'oelint_adv_test.bb':
'''
do_foo() {
echo "abc"
}
''',
},
],
)
def test_fix(self, input, id):
self.fix_and_check(self._create_args_fix(input), id)
@pytest.mark.parametrize('id', ['oelint.spaces.linebeginning'])
@pytest.mark.parametrize('occurrence', [0])
@pytest.mark.parametrize('input',
[
{
'oelint_adv_test.bb':
'ABC = "1"',
},
{
'oelint_adv_test.bb':
'''
def foo():
return "abc"
''',
},
{
'oelint_adv_test.bb':
'''
do_foo() {
echo "abc"
}
''',
},
],
)
def test_good(self, input, id, occurrence):
self.check_for_id(self._create_args(input), id, occurrence)
| 40.8
| 67
| 0.217492
| 172
| 3,876
| 4.69186
| 0.209302
| 0.100372
| 0.144981
| 0.167286
| 0.801735
| 0.801735
| 0.801735
| 0.801735
| 0.750929
| 0.750929
| 0
| 0.00954
| 0.702528
| 3,876
| 94
| 68
| 41.234043
| 0.690373
| 0
| 0
| 0.322581
| 0
| 0
| 0.109985
| 0.030405
| 0
| 0
| 0
| 0
| 0
| 1
| 0.048387
| false
| 0
| 0.032258
| 0
| 0.096774
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
83b2c2c4b3326983ef5c99fdd660c1f2ce611c9c
| 4,812
|
py
|
Python
|
h2o-py/tests/testdir_jira/pyunit_hexdev_289_binomial_domain_check.py
|
PawarPawan/h2o-v3
|
cf569a538c9e2ec16ba9fc1a75d14beda8f40c18
|
[
"Apache-2.0"
] | null | null | null |
h2o-py/tests/testdir_jira/pyunit_hexdev_289_binomial_domain_check.py
|
PawarPawan/h2o-v3
|
cf569a538c9e2ec16ba9fc1a75d14beda8f40c18
|
[
"Apache-2.0"
] | null | null | null |
h2o-py/tests/testdir_jira/pyunit_hexdev_289_binomial_domain_check.py
|
PawarPawan/h2o-v3
|
cf569a538c9e2ec16ba9fc1a75d14beda8f40c18
|
[
"Apache-2.0"
] | null | null | null |
import sys
sys.path.insert(1, "../../")
import h2o
def domain_check(ip, port):
air_train = h2o.import_file(path=h2o.locate("smalldata/airlines/AirlinesTrain.csv.zip"))
air_train.show()
air_test = h2o.import_file(path=h2o.locate("smalldata/airlines/AirlinesTest.csv.zip"))
air_test.show()
actual_domain = [u'YES',u'NO']
print "actual domain of the response: {0}".format(actual_domain)
### DRF ###
print
print "-------------- DRF:"
print
rf = h2o.random_forest(x=air_train[["Origin", "Dest", "Distance", "UniqueCarrier", "fMonth", "fDayofMonth",
"fDayOfWeek"]], y=air_train ["IsDepDelayed"].asfactor(), training_frame=air_train)
computed_domain = rf._model_json['output']['training_metrics']._metric_json['domain']
domain_diff = list(set(computed_domain) - set(actual_domain))
assert not domain_diff, "There's a difference between the actual ({0}) and the computed ({1}) domains of the " \
"The difference is {2}".format(actual_domain, computed_domain, domain_diff)
perf = rf.model_performance(test_data=air_test)
computed_domain = perf._metric_json['domain']
domain_diff = list(set(computed_domain) - set(actual_domain))
assert not domain_diff, "There's a difference between the actual ({0}) and the computed ({1}) domains of the " \
"The difference is {2}".format(actual_domain, computed_domain, domain_diff)
### GBM ###
print
print "-------------- GBM:"
print
gbm = h2o.gbm(x=air_train[["Origin", "Dest", "Distance", "UniqueCarrier", "fMonth", "fDayofMonth","fDayOfWeek"]],
y=air_train["IsDepDelayed"].asfactor(), training_frame=air_train, distribution="bernoulli")
computed_domain = gbm._model_json['output']['training_metrics']._metric_json['domain']
domain_diff = list(set(computed_domain) - set(actual_domain))
assert not domain_diff, "There's a difference between the actual ({0}) and the computed ({1}) domains of the " \
"The difference is {2}".format(actual_domain, computed_domain, domain_diff)
perf = rf.model_performance(test_data=air_test)
computed_domain = perf._metric_json['domain']
domain_diff = list(set(computed_domain) - set(actual_domain))
assert not domain_diff, "There's a difference between the actual ({0}) and the computed ({1}) domains of the " \
"The difference is {2}".format(actual_domain, computed_domain, domain_diff)
### Deeplearning ###
print
print "-------------- Deeplearning:"
print
dl = h2o.deeplearning(x=air_train[["Origin", "Dest", "Distance", "UniqueCarrier", "fMonth", "fDayofMonth","fDayOfWeek"]],
y=air_train["IsDepDelayed"].asfactor(), training_frame = air_train, activation = "Tanh",
hidden = [2, 2, 2], epochs = 10)
computed_domain = dl._model_json['output']['training_metrics']._metric_json['domain']
domain_diff = list(set(computed_domain) - set(actual_domain))
assert not domain_diff, "There's a difference between the actual ({0}) and the computed ({1}) domains of the " \
"The difference is {2}".format(actual_domain, computed_domain, domain_diff)
perf = rf.model_performance(test_data=air_test)
computed_domain = perf._metric_json['domain']
domain_diff = list(set(computed_domain) - set(actual_domain))
assert not domain_diff, "There's a difference between the actual ({0}) and the computed ({1}) domains of the " \
"The difference is {2}".format(actual_domain, computed_domain, domain_diff)
### GLM ###
print
print "-------------- GLM:"
print
glm = h2o.glm(x=air_train[["Origin", "Dest", "Distance", "UniqueCarrier", "fMonth", "fDayofMonth", "fDayOfWeek"]],
y=air_train["IsDepDelayed"], training_frame=air_train , family="binomial")
computed_domain = glm._model_json['output']['training_metrics']._metric_json['domain']
domain_diff = list(set(computed_domain) - set(actual_domain))
assert not domain_diff, "There's a difference between the actual ({0}) and the computed ({1}) domains of the " \
"The difference is {2}".format(actual_domain, computed_domain, domain_diff)
perf = glm.model_performance(test_data=air_test)
computed_domain = perf._metric_json['domain']
domain_diff = list(set(computed_domain) - set(actual_domain))
assert not domain_diff, "There's a difference between the actual ({0}) and the computed ({1}) domains of the " \
"The difference is {2}".format(actual_domain, computed_domain, domain_diff)
# Standard h2o pyunit entry point: connects to the cluster described by
# the command-line arguments and runs the check.
if __name__ == "__main__":
    h2o.run_test(sys.argv, domain_check)
| 54.681818
| 125
| 0.653782
| 603
| 4,812
| 4.985075
| 0.150912
| 0.111776
| 0.085163
| 0.05855
| 0.819361
| 0.819361
| 0.819361
| 0.819361
| 0.790752
| 0.790752
| 0
| 0.010688
| 0.202826
| 4,812
| 87
| 126
| 55.310345
| 0.772941
| 0.005611
| 0
| 0.565217
| 0
| 0
| 0.313932
| 0.0166
| 0
| 0
| 0
| 0
| 0.115942
| 0
| null | null | 0
| 0.057971
| null | null | 0.188406
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
83ed2ef82abd11c1b07f740fe8aedabbf6a9980a
| 12,809
|
py
|
Python
|
mfl_gis/migrations/0001_initial.py
|
Jenks18/mfl_api
|
ecbb8954053be06bbcac7e1132811d73534c78d9
|
[
"MIT"
] | 19
|
2015-04-16T09:37:08.000Z
|
2022-02-10T11:50:30.000Z
|
mfl_gis/migrations/0001_initial.py
|
Jenks18/mfl_api
|
ecbb8954053be06bbcac7e1132811d73534c78d9
|
[
"MIT"
] | 125
|
2015-03-26T14:05:49.000Z
|
2020-05-14T08:16:50.000Z
|
mfl_gis/migrations/0001_initial.py
|
Jenks18/mfl_api
|
ecbb8954053be06bbcac7e1132811d73534c78d9
|
[
"MIT"
] | 39
|
2015-04-15T09:17:33.000Z
|
2022-03-28T18:08:16.000Z
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.contrib.gis.db.models.fields
import common.models.base
import django.db.models.deletion
import django.utils.timezone
from django.conf import settings
import mfl_gis.models
import uuid
class Migration(migrations.Migration):
    """Initial schema for the mfl_gis app.

    Auto-generated Django migration: creates the GIS boundary models
    (constituency / county / ward / world border), facility coordinates and
    the geo-code method/source lookup tables. Do not edit by hand beyond
    comments — subsequent migrations depend on the exact state it creates.
    """

    # Migrations that must already be applied before this one.
    dependencies = [
        ('common', '0001_initial'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('facilities', '0001_initial'),
    ]

    operations = [
        # Polygon boundary tied 1:1 to a common.Constituency record.
        migrations.CreateModel(
            name='ConstituencyBoundary',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, serialize=False, editable=False, primary_key=True)),
                ('created', models.DateTimeField(default=django.utils.timezone.now)),
                ('updated', models.DateTimeField(default=django.utils.timezone.now)),
                ('deleted', models.BooleanField(default=False)),
                ('active', models.BooleanField(default=True, help_text=b'Indicates whether the record has been retired?')),
                ('search', models.CharField(max_length=255, null=True, editable=False, blank=True)),
                ('name', models.CharField(max_length=100)),
                ('code', models.CharField(unique=True, max_length=10)),
                ('mpoly', django.contrib.gis.db.models.fields.MultiPolygonField(srid=4326, null=True, blank=True)),
                ('area', models.OneToOneField(to='common.Constituency')),
                ('created_by', models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.PROTECT, default=common.models.base.get_default_system_user_id, to=settings.AUTH_USER_MODEL)),
                ('updated_by', models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.PROTECT, default=common.models.base.get_default_system_user_id, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ('-updated', '-created'),
                'default_permissions': ('add', 'change', 'delete', 'view'),
                'abstract': False,
                'verbose_name_plural': 'constituency boundaries',
            },
        ),
        # Polygon boundary tied 1:1 to a common.County record.
        migrations.CreateModel(
            name='CountyBoundary',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, serialize=False, editable=False, primary_key=True)),
                ('created', models.DateTimeField(default=django.utils.timezone.now)),
                ('updated', models.DateTimeField(default=django.utils.timezone.now)),
                ('deleted', models.BooleanField(default=False)),
                ('active', models.BooleanField(default=True, help_text=b'Indicates whether the record has been retired?')),
                ('search', models.CharField(max_length=255, null=True, editable=False, blank=True)),
                ('name', models.CharField(max_length=100)),
                ('code', models.CharField(unique=True, max_length=10)),
                ('mpoly', django.contrib.gis.db.models.fields.MultiPolygonField(srid=4326, null=True, blank=True)),
                ('area', models.OneToOneField(to='common.County')),
                ('created_by', models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.PROTECT, default=common.models.base.get_default_system_user_id, to=settings.AUTH_USER_MODEL)),
                ('updated_by', models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.PROTECT, default=common.models.base.get_default_system_user_id, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ('-updated', '-created'),
                'default_permissions': ('add', 'change', 'delete', 'view'),
                'abstract': False,
                'verbose_name_plural': 'county boundaries',
            },
        ),
        # GPS point for a facility; mixes in coordinate validation from mfl_gis.models.
        migrations.CreateModel(
            name='FacilityCoordinates',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, serialize=False, editable=False, primary_key=True)),
                ('created', models.DateTimeField(default=django.utils.timezone.now)),
                ('updated', models.DateTimeField(default=django.utils.timezone.now)),
                ('deleted', models.BooleanField(default=False)),
                ('active', models.BooleanField(default=True, help_text=b'Indicates whether the record has been retired?')),
                ('search', models.CharField(max_length=255, null=True, editable=False, blank=True)),
                ('coordinates', django.contrib.gis.db.models.fields.PointField(srid=4326)),
                ('collection_date', models.DateTimeField(default=django.utils.timezone.now)),
                ('created_by', models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.PROTECT, default=common.models.base.get_default_system_user_id, to=settings.AUTH_USER_MODEL)),
                ('facility', models.OneToOneField(related_name='facility_coordinates_through', to='facilities.Facility')),
            ],
            options={
                'ordering': ('-updated', '-created'),
                'default_permissions': ('add', 'change', 'delete', 'view'),
                'abstract': False,
                'verbose_name': 'facility coordinates',
                'verbose_name_plural': 'facility coordinates',
            },
            bases=(mfl_gis.models.CoordinatesValidatorMixin, models.Model),
        ),
        # Lookup table: how a geo-code was obtained.
        migrations.CreateModel(
            name='GeoCodeMethod',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, serialize=False, editable=False, primary_key=True)),
                ('created', models.DateTimeField(default=django.utils.timezone.now)),
                ('updated', models.DateTimeField(default=django.utils.timezone.now)),
                ('deleted', models.BooleanField(default=False)),
                ('active', models.BooleanField(default=True, help_text=b'Indicates whether the record has been retired?')),
                ('search', models.CharField(max_length=255, null=True, editable=False, blank=True)),
                ('name', models.CharField(help_text=b'The name of the method.', unique=True, max_length=100)),
                ('description', models.TextField(help_text=b'A short description of the method', null=True, blank=True)),
                ('created_by', models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.PROTECT, default=common.models.base.get_default_system_user_id, to=settings.AUTH_USER_MODEL)),
                ('updated_by', models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.PROTECT, default=common.models.base.get_default_system_user_id, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ('-updated', '-created'),
                'default_permissions': ('add', 'change', 'delete', 'view'),
                'abstract': False,
            },
        ),
        # Lookup table: which organization supplied a geo-code.
        migrations.CreateModel(
            name='GeoCodeSource',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, serialize=False, editable=False, primary_key=True)),
                ('created', models.DateTimeField(default=django.utils.timezone.now)),
                ('updated', models.DateTimeField(default=django.utils.timezone.now)),
                ('deleted', models.BooleanField(default=False)),
                ('active', models.BooleanField(default=True, help_text=b'Indicates whether the record has been retired?')),
                ('search', models.CharField(max_length=255, null=True, editable=False, blank=True)),
                ('name', models.CharField(help_text=b'The name of the collecting organization', unique=True, max_length=100)),
                ('description', models.TextField(help_text=b'A short summary of the collecting organization', null=True, blank=True)),
                ('abbreviation', models.CharField(help_text=b'An acronym of the collecting or e.g SAM', max_length=10, null=True, blank=True)),
                ('created_by', models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.PROTECT, default=common.models.base.get_default_system_user_id, to=settings.AUTH_USER_MODEL)),
                ('updated_by', models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.PROTECT, default=common.models.base.get_default_system_user_id, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ('-updated', '-created'),
                'default_permissions': ('add', 'change', 'delete', 'view'),
                'abstract': False,
            },
        ),
        # Polygon boundary tied 1:1 to a common.Ward record.
        migrations.CreateModel(
            name='WardBoundary',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, serialize=False, editable=False, primary_key=True)),
                ('created', models.DateTimeField(default=django.utils.timezone.now)),
                ('updated', models.DateTimeField(default=django.utils.timezone.now)),
                ('deleted', models.BooleanField(default=False)),
                ('active', models.BooleanField(default=True, help_text=b'Indicates whether the record has been retired?')),
                ('search', models.CharField(max_length=255, null=True, editable=False, blank=True)),
                ('name', models.CharField(max_length=100)),
                ('code', models.CharField(unique=True, max_length=10)),
                ('mpoly', django.contrib.gis.db.models.fields.MultiPolygonField(srid=4326, null=True, blank=True)),
                ('area', models.OneToOneField(to='common.Ward')),
                ('created_by', models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.PROTECT, default=common.models.base.get_default_system_user_id, to=settings.AUTH_USER_MODEL)),
                ('updated_by', models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.PROTECT, default=common.models.base.get_default_system_user_id, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ('-updated', '-created'),
                'default_permissions': ('add', 'change', 'delete', 'view'),
                'abstract': False,
                'verbose_name_plural': 'ward boundaries',
            },
        ),
        # Country outlines with a representative longitude/latitude point.
        migrations.CreateModel(
            name='WorldBorder',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, serialize=False, editable=False, primary_key=True)),
                ('created', models.DateTimeField(default=django.utils.timezone.now)),
                ('updated', models.DateTimeField(default=django.utils.timezone.now)),
                ('deleted', models.BooleanField(default=False)),
                ('active', models.BooleanField(default=True, help_text=b'Indicates whether the record has been retired?')),
                ('search', models.CharField(max_length=255, null=True, editable=False, blank=True)),
                ('name', models.CharField(max_length=100)),
                ('code', models.CharField(unique=True, max_length=10)),
                ('mpoly', django.contrib.gis.db.models.fields.MultiPolygonField(srid=4326, null=True, blank=True)),
                ('longitude', models.FloatField()),
                ('latitude', models.FloatField()),
                ('created_by', models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.PROTECT, default=common.models.base.get_default_system_user_id, to=settings.AUTH_USER_MODEL)),
                ('updated_by', models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.PROTECT, default=common.models.base.get_default_system_user_id, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ('-updated', '-created'),
                'default_permissions': ('add', 'change', 'delete', 'view'),
                'abstract': False,
            },
        ),
        # FK fields added after model creation to break the circular dependency
        # between FacilityCoordinates and the lookup tables above.
        migrations.AddField(
            model_name='facilitycoordinates',
            name='method',
            field=models.ForeignKey(help_text=b'Method used to obtain the geo codes. e.g taken with GPS device', to='mfl_gis.GeoCodeMethod'),
        ),
        migrations.AddField(
            model_name='facilitycoordinates',
            name='source',
            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='mfl_gis.GeoCodeSource', help_text=b'where the geo code came from'),
        ),
        migrations.AddField(
            model_name='facilitycoordinates',
            name='updated_by',
            field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.PROTECT, default=common.models.base.get_default_system_user_id, to=settings.AUTH_USER_MODEL),
        ),
    ]
| 65.687179
| 197
| 0.623234
| 1,340
| 12,809
| 5.80597
| 0.112687
| 0.022622
| 0.028792
| 0.045244
| 0.833548
| 0.830463
| 0.804756
| 0.79383
| 0.79383
| 0.79383
| 0
| 0.008666
| 0.23421
| 12,809
| 194
| 198
| 66.025773
| 0.784484
| 0.001639
| 0
| 0.702128
| 0
| 0
| 0.169248
| 0.005475
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.047872
| 0
| 0.06383
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f7ed20bf290595727e46214483ad66828d28ed3a
| 4,376
|
py
|
Python
|
typesafety/pyright/create.py
|
matyasrichter/prisma-client-py
|
1b320ba32ff8c70c7481396e2ecf5a3fcbf4b5c7
|
[
"Apache-2.0"
] | null | null | null |
typesafety/pyright/create.py
|
matyasrichter/prisma-client-py
|
1b320ba32ff8c70c7481396e2ecf5a3fcbf4b5c7
|
[
"Apache-2.0"
] | 8
|
2022-03-03T02:26:41.000Z
|
2022-03-30T12:39:20.000Z
|
typesafety/pyright/create.py
|
matyasrichter/prisma-client-py
|
1b320ba32ff8c70c7481396e2ecf5a3fcbf4b5c7
|
[
"Apache-2.0"
] | null | null | null |
from prisma import Prisma
async def main(client: Prisma) -> None:
    """Static-typing cases for `Post.create`.

    NOTE(review): the `# E:` comments are expected-error annotations consumed
    by the pyright-based test harness; they must stay on the exact lines they
    annotate, so no code here may be reflowed relative to them.
    """
    # case: missing arguments
    await client.post.create()  # E: Argument missing for parameter "data"
    await client.post.create(
        data={}  # E: Argument of type "dict[Any, Any]" cannot be assigned to parameter "data" of type "PostCreateInput" in function "create"
    )
    await client.post.create(
        data={  # E: Argument of type "dict[str, str]" cannot be assigned to parameter "data" of type "PostCreateInput" in function "create"
            'title': '',
        },
    )
    # case: minimum required args
    await client.post.create(
        data={
            'title': '',
            'published': False,
        },
    )
    # case: nullable field to null
    await client.post.create(
        data={
            'title': 'foo',
            'published': False,
            'desc': None,
        },
    )
    # case: setting non-null field to null
    await client.post.create(
        data={  # E: Argument of type "dict[str, str | None]" cannot be assigned to parameter "data" of type "PostCreateInput" in function "create"
            'title': 'foo',
            'published': None,
        }
    )
    # case: one-one relations are non nullable
    await client.post.create(
        data={  # E: Argument of type "dict[str, str | bool | None]" cannot be assigned to parameter "data" of type "PostCreateInput" in function "create"
            'title': 'foo',
            'published': False,
            'author': None,
        },
    )
    await client.post.create(
        data={  # E: Argument of type "dict[str, str | bool | dict[str, None]]" cannot be assigned to parameter "data" of type "PostCreateInput" in function "create"
            'title': 'foo',
            'published': False,
            'author': {
                'create': None,
            },
        }
    )
    await client.post.create(
        data={  # E: Argument of type "dict[str, str | bool | dict[str, None]]" cannot be assigned to parameter "data" of type "PostCreateInput" in function "create"
            'title': 'foo',
            'published': False,
            'author': {
                'connect': None,
            },
        }
    )
    # case: one-many relations are non nullable
    await client.post.create(
        data={  # E: Argument of type "dict[str, str | bool | None]" cannot be assigned to parameter "data" of type "PostCreateInput" in function "create"
            'title': 'foo',
            'published': False,
            'categories': None,
        },
    )
async def nested_create(client: Prisma) -> None:
    """Static-typing cases for nested relational writes in `Post.create`.

    NOTE(review): no `# E:` expectations here yet — every call below is
    expected to type-check cleanly under the pyright test harness.
    """
    # TODO: test invalid cases
    # case: valid nested create one-one
    await client.post.create(
        data={
            'title': '',
            'published': False,
            'author': {
                'create': {
                    'name': 'Robert',
                },
            },
        },
    )
    await client.post.create(
        data={
            'title': '',
            'published': False,
            'author': {
                'connect': {'id': 'a'},
            },
        },
    )
    # case: valid nested create one-many
    await client.post.create(
        data={
            'title': '',
            'published': False,
            'categories': {
                'create': {
                    'name': 'Category',
                },
            },
        },
    )
    await client.post.create(
        data={
            'title': '',
            'published': False,
            'categories': {
                'create': [
                    {
                        'name': 'Category',
                    },
                    {
                        'name': 'Category 2',
                    },
                ],
            },
        },
    )
    await client.post.create(
        data={
            'title': '',
            'published': False,
            'categories': {
                'connect': {'id': 1},
            },
        },
    )
    await client.post.create(
        data={
            'title': '',
            'published': False,
            'categories': {
                'connect': [
                    {
                        'id': 1,
                    },
                    {
                        'id': 2,
                    },
                ],
            },
        },
    )
| 28.232258
| 165
| 0.442642
| 384
| 4,376
| 5.041667
| 0.169271
| 0.090909
| 0.123967
| 0.173554
| 0.809917
| 0.785124
| 0.782541
| 0.782541
| 0.735537
| 0.683884
| 0
| 0.001604
| 0.430073
| 4,376
| 154
| 166
| 28.415584
| 0.774659
| 0.292733
| 0
| 0.511111
| 0
| 0
| 0.129347
| 0
| 0
| 0
| 0
| 0.006494
| 0
| 1
| 0
| true
| 0
| 0.007407
| 0
| 0.007407
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7925756a4ec5634c24312e5498d5a1e76c4d852f
| 29
|
py
|
Python
|
src/PT_Pool/__init__.py
|
michalmonday/PT_Pool
|
fb43bad4e200e5b8eaf7ce620c3030a068f36905
|
[
"MIT"
] | null | null | null |
src/PT_Pool/__init__.py
|
michalmonday/PT_Pool
|
fb43bad4e200e5b8eaf7ce620c3030a068f36905
|
[
"MIT"
] | null | null | null |
src/PT_Pool/__init__.py
|
michalmonday/PT_Pool
|
fb43bad4e200e5b8eaf7ce620c3030a068f36905
|
[
"MIT"
] | null | null | null |
from .PT_Pool import PT_Pool
| 14.5
| 28
| 0.827586
| 6
| 29
| 3.666667
| 0.666667
| 0.545455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137931
| 29
| 1
| 29
| 29
| 0.88
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
793e070d4936a8fcc803e1d83015e01119f9052c
| 2,487
|
py
|
Python
|
tests/parser/tsp-austria.bk.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/tsp-austria.bk.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/tsp-austria.bk.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
input = """
% map date to weekday (mon =1, ..., sun = 7);
% The tour starts on Monday.
weekday(1,1).
weekday(D,W) :- D=D1+1, W=W1+1, weekday(D1,W1), W1 < 7.
weekday(D,1) :- D=D1+1, weekday(D1,7).
% connections with default costs (capitols of the Austrian federal states).
conn(brg,ibk,2).
conn(ibk,sbg,2).
conn(ibk,wie,5).
conn(ibk,kla,3).
conn(sbg,kla,2).
conn(sbg,gra,2).
conn(sbg,lin,1).
conn(sbg,wie,3).
conn(kla,gra,2).
conn(lin,stp,1).
conn(lin,wie,2).
conn(lin,gra,2).
conn(gra,wie,2).
conn(gra,eis,1).
conn(stp,wie,1).
conn(eis,wie,1).
conn(stp,eis,2).
conn(B,A,C) :- conn(A,B,C).
city(T) :- conn(T,_,_).
% costing: use default cost, if there are no extra costs
%
cost(A,B,W,C) :- conn(A,B,C), 0 < W, W <= 7, %#int(W), 0 < W, W <= 7,
not ecost(A,B,W).
ecost(A,B,W) :- ex_cost(A,B,W,C).
cost(A,B,W,C) :- ex_cost(A,B,W,C).
ex_cost(A,B,W,C) :- ex_cost(B,A,W,C).
% Some example of an extra cost.
ex_cost(stp,eis,2,10).
% Nicer would be, in an unstratified program,
% to have no "ex_cost" but only cost, and define
%
% ecost(A,B,W) :- cost(A,B,W,C), dcost(A,B,C1), C != C1.
% ecost(A,B,W) :- ecost(B,A,W).
"""
output = """
% map date to weekday (mon =1, ..., sun = 7);
% The tour starts on Monday.
weekday(1,1).
weekday(D,W) :- D=D1+1, W=W1+1, weekday(D1,W1), W1 < 7.
weekday(D,1) :- D=D1+1, weekday(D1,7).
% connections with default costs (capitols of the Austrian federal states).
conn(brg,ibk,2).
conn(ibk,sbg,2).
conn(ibk,wie,5).
conn(ibk,kla,3).
conn(sbg,kla,2).
conn(sbg,gra,2).
conn(sbg,lin,1).
conn(sbg,wie,3).
conn(kla,gra,2).
conn(lin,stp,1).
conn(lin,wie,2).
conn(lin,gra,2).
conn(gra,wie,2).
conn(gra,eis,1).
conn(stp,wie,1).
conn(eis,wie,1).
conn(stp,eis,2).
conn(B,A,C) :- conn(A,B,C).
city(T) :- conn(T,_,_).
% costing: use default cost, if there are no extra costs
%
cost(A,B,W,C) :- conn(A,B,C), 0 < W, W <= 7, %#int(W), 0 < W, W <= 7,
not ecost(A,B,W).
ecost(A,B,W) :- ex_cost(A,B,W,C).
cost(A,B,W,C) :- ex_cost(A,B,W,C).
ex_cost(A,B,W,C) :- ex_cost(B,A,W,C).
% Some example of an extra cost.
ex_cost(stp,eis,2,10).
% Nicer would be, in an unstratified program,
% to have no "ex_cost" but only cost, and define
%
% ecost(A,B,W) :- cost(A,B,W,C), dcost(A,B,C1), C != C1.
% ecost(A,B,W) :- ecost(B,A,W).
"""
| 20.89916
| 76
| 0.548854
| 498
| 2,487
| 2.708835
| 0.15261
| 0.038547
| 0.044477
| 0.062268
| 0.991846
| 0.991846
| 0.991846
| 0.991846
| 0.991846
| 0.991846
| 0
| 0.044444
| 0.221954
| 2,487
| 118
| 77
| 21.076271
| 0.652713
| 0
| 0
| 0.926829
| 0
| 0.121951
| 0.986936
| 0.018542
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f7113a19ca443354c370f38ad63f77db03ae42db
| 5,269
|
py
|
Python
|
moment/test/test_isSameOrBefore.py
|
KrixTam/pymoment
|
b938cafc4c772df55feb3daa41286eade6f3e310
|
[
"MIT"
] | 1
|
2021-04-24T17:51:08.000Z
|
2021-04-24T17:51:08.000Z
|
moment/test/test_isSameOrBefore.py
|
KrixTam/pymoment
|
b938cafc4c772df55feb3daa41286eade6f3e310
|
[
"MIT"
] | null | null | null |
moment/test/test_isSameOrBefore.py
|
KrixTam/pymoment
|
b938cafc4c772df55feb3daa41286eade6f3e310
|
[
"MIT"
] | null | null | null |
import unittest
from moment import moment
class TestIsSameOrBefore(unittest.TestCase):
    """Tests for moment.isSameOrBefore at every supported granularity.

    Each granularity test compares one fixed reference instant against two
    other instants and expects isSameOrBefore to be False without the
    inclusive flag and True when inclusive comparison is requested.
    The heavy copy/paste duplication of the original tests is factored
    into the `_check_inclusive_only` helper; test method names and the
    exact assertion calls are unchanged.
    """

    # Reference instant shared by all granularity checks.
    A_TEXT = '2021-04-22 04:02:09.957000 +0800'

    def _check_inclusive_only(self, b_text, unit):
        # a.isSameOrBefore(b, unit) must be False, but True when inclusive.
        a = moment(self.A_TEXT)
        b = moment(b_text)
        self.assertFalse(a.isSameOrBefore(b, unit))
        self.assertTrue(a.isSameOrBefore(b, unit, True))

    def test_default(self):
        a = moment('2021-04-22 04:02:09.957000 +0800')
        b = moment('2021-2-2 13:02:09.957000 +0800')
        self.assertTrue(a.isSameOrBefore([2021, 5, 1]))
        self.assertFalse(a.isSameOrBefore(b))
        a = moment('2021-04-22 04:02:09.957000 +0800')
        b = moment('2021-2-2 13:02:09.957000 +0800')
        self.assertTrue(a.isSameOrBefore('2021-04-22 04:02:09.957000 +0800'))
        self.assertFalse(a.isSameOrBefore(b))

    def test_year(self):
        self._check_inclusive_only('2021-2-2 13:02:09.957000 +0800', 'year')
        self._check_inclusive_only('2021-1-1 0:0:0.0 +0800', 'year')

    def test_month(self):
        self._check_inclusive_only('2021-4-2 13:02:09.957000 +0800', 'month')
        self._check_inclusive_only('2021-4-1 0:0:0.0 +0800', 'month')

    def test_quarter(self):
        self._check_inclusive_only('2021-5-2 13:02:09.957000 +0800', 'quarter')
        self._check_inclusive_only('2021-4-1 0:0:0.0 +0800', 'quarter')

    def test_week(self):
        self._check_inclusive_only('2021-4-21 13:02:09.957000 +0800', 'week')
        self._check_inclusive_only('2021-4-18 0:0:0.0 +0800', 'week')

    def test_isoWeek(self):
        self._check_inclusive_only('2021-4-21 13:02:09.957000 +0800', 'isoWeek')
        self._check_inclusive_only('2021-4-19 0:0:0.0 +0800', 'isoWeek')

    def test_day(self):
        self._check_inclusive_only('2021-4-22 13:02:09.957000 +0800', 'day')
        self._check_inclusive_only('2021-4-22 0:0:0.0 +0800', 'day')

    def test_date(self):
        self._check_inclusive_only('2021-4-22 13:02:09.957000 +0800', 'date')
        self._check_inclusive_only('2021-4-22 0:0:0.0 +0800', 'date')

    def test_hour(self):
        self._check_inclusive_only('2021-4-22 4:12:09.957000 +0800', 'hour')
        self._check_inclusive_only('2021-4-22 4:0:0.0 +0800', 'hour')

    def test_minute(self):
        self._check_inclusive_only('2021-4-22 4:2:39.957000 +0800', 'minute')
        self._check_inclusive_only('2021-4-22 4:2:0.0 +0800', 'minute')

    def test_second(self):
        self._check_inclusive_only('2021-4-22 4:2:9.957000 +0800', 'second')
        self._check_inclusive_only('2021-4-22 4:2:9.0 +0800', 'second')
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| 43.908333
| 77
| 0.605808
| 788
| 5,269
| 4.02665
| 0.062183
| 0.13867
| 0.211787
| 0.141191
| 0.923416
| 0.913646
| 0.913646
| 0.909549
| 0.904507
| 0.889379
| 0
| 0.234469
| 0.214842
| 5,269
| 119
| 78
| 44.277311
| 0.532511
| 0
| 0
| 0.721154
| 0
| 0
| 0.291137
| 0
| 0
| 0
| 0
| 0
| 0.423077
| 1
| 0.105769
| false
| 0
| 0.019231
| 0
| 0.134615
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
f718836b8c0b46908cdf57f0144d22ccc800514c
| 363,980
|
py
|
Python
|
pynos/versions/ver_6/ver_6_0_1/yang/brocade_interface_ext.py
|
bdeetz/pynos
|
bd8a34e98f322de3fc06750827d8bbc3a0c00380
|
[
"Apache-2.0"
] | 12
|
2015-09-21T23:56:09.000Z
|
2018-03-30T04:35:32.000Z
|
pynos/versions/ver_6/ver_6_0_1/yang/brocade_interface_ext.py
|
bdeetz/pynos
|
bd8a34e98f322de3fc06750827d8bbc3a0c00380
|
[
"Apache-2.0"
] | 10
|
2016-09-15T19:03:27.000Z
|
2017-07-17T23:38:01.000Z
|
pynos/versions/ver_6/ver_6_0_1/yang/brocade_interface_ext.py
|
bdeetz/pynos
|
bd8a34e98f322de3fc06750827d8bbc3a0c00380
|
[
"Apache-2.0"
] | 6
|
2015-08-14T08:05:23.000Z
|
2022-02-03T15:33:54.000Z
|
#!/usr/bin/env python
import xml.etree.ElementTree as ET
class brocade_interface_ext(object):
"""Auto generated class.
"""
def __init__(self, **kwargs):
    """Store the required ``callback`` keyword argument.

    The callback is invoked by every ``get_*`` method with the generated
    ElementTree element; ``pop`` raises KeyError if it is missing.
    """
    self._callback = kwargs.pop('callback')
def get_vlan_brief_input_request_type_get_request_vlan_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
input = ET.SubElement(get_vlan_brief, "input")
request_type = ET.SubElement(input, "request-type")
get_request = ET.SubElement(request_type, "get-request")
vlan_id = ET.SubElement(get_request, "vlan-id")
vlan_id.text = kwargs.pop('vlan_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_input_request_type_get_next_request_last_rcvd_vlan_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
input = ET.SubElement(get_vlan_brief, "input")
request_type = ET.SubElement(input, "request-type")
get_next_request = ET.SubElement(request_type, "get-next-request")
last_rcvd_vlan_id = ET.SubElement(get_next_request, "last-rcvd-vlan-id")
last_rcvd_vlan_id.text = kwargs.pop('last_rcvd_vlan_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_configured_vlans_count(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
configured_vlans_count = ET.SubElement(output, "configured-vlans-count")
configured_vlans_count.text = kwargs.pop('configured_vlans_count')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_provisioned_vlans_count(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
provisioned_vlans_count = ET.SubElement(output, "provisioned-vlans-count")
provisioned_vlans_count.text = kwargs.pop('provisioned_vlans_count')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_unprovisioned_vlans_count(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
unprovisioned_vlans_count = ET.SubElement(output, "unprovisioned-vlans-count")
unprovisioned_vlans_count.text = kwargs.pop('unprovisioned_vlans_count')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_vlan_vlan_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
vlan = ET.SubElement(output, "vlan")
vlan_id = ET.SubElement(vlan, "vlan-id")
vlan_id.text = kwargs.pop('vlan_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_vlan_vlan_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
vlan = ET.SubElement(output, "vlan")
vlan_id_key = ET.SubElement(vlan, "vlan-id")
vlan_id_key.text = kwargs.pop('vlan_id')
vlan_type = ET.SubElement(vlan, "vlan-type")
vlan_type.text = kwargs.pop('vlan_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_vlan_vlan_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
vlan = ET.SubElement(output, "vlan")
vlan_id_key = ET.SubElement(vlan, "vlan-id")
vlan_id_key.text = kwargs.pop('vlan_id')
vlan_name = ET.SubElement(vlan, "vlan-name")
vlan_name.text = kwargs.pop('vlan_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_vlan_vlan_state(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
vlan = ET.SubElement(output, "vlan")
vlan_id_key = ET.SubElement(vlan, "vlan-id")
vlan_id_key.text = kwargs.pop('vlan_id')
vlan_state = ET.SubElement(vlan, "vlan-state")
vlan_state.text = kwargs.pop('vlan_state')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_vlan_interface_interface_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
vlan = ET.SubElement(output, "vlan")
vlan_id_key = ET.SubElement(vlan, "vlan-id")
vlan_id_key.text = kwargs.pop('vlan_id')
interface = ET.SubElement(vlan, "interface")
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_type = ET.SubElement(interface, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_vlan_interface_interface_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
vlan = ET.SubElement(output, "vlan")
vlan_id_key = ET.SubElement(vlan, "vlan-id")
vlan_id_key.text = kwargs.pop('vlan_id')
interface = ET.SubElement(vlan, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name = ET.SubElement(interface, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_vlan_interface_tag(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
vlan = ET.SubElement(output, "vlan")
vlan_id_key = ET.SubElement(vlan, "vlan-id")
vlan_id_key.text = kwargs.pop('vlan_id')
interface = ET.SubElement(vlan, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
tag = ET.SubElement(interface, "tag")
tag.text = kwargs.pop('tag')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_vlan_interface_classification_classification_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
vlan = ET.SubElement(output, "vlan")
vlan_id_key = ET.SubElement(vlan, "vlan-id")
vlan_id_key.text = kwargs.pop('vlan_id')
interface = ET.SubElement(vlan, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
classification = ET.SubElement(interface, "classification")
classification_value_key = ET.SubElement(classification, "classification-value")
classification_value_key.text = kwargs.pop('classification_value')
classification_type = ET.SubElement(classification, "classification-type")
classification_type.text = kwargs.pop('classification_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_vlan_interface_classification_classification_value(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
vlan = ET.SubElement(output, "vlan")
vlan_id_key = ET.SubElement(vlan, "vlan-id")
vlan_id_key.text = kwargs.pop('vlan_id')
interface = ET.SubElement(vlan, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
classification = ET.SubElement(interface, "classification")
classification_type_key = ET.SubElement(classification, "classification-type")
classification_type_key.text = kwargs.pop('classification_type')
classification_value = ET.SubElement(classification, "classification-value")
classification_value.text = kwargs.pop('classification_value')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_last_vlan_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
last_vlan_id = ET.SubElement(output, "last-vlan-id")
last_vlan_id.text = kwargs.pop('last_vlan_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_has_more(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
has_more = ET.SubElement(output, "has-more")
has_more.text = kwargs.pop('has_more')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_switchport_output_switchport_interface_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_switchport = ET.Element("get_interface_switchport")
config = get_interface_switchport
output = ET.SubElement(get_interface_switchport, "output")
switchport = ET.SubElement(output, "switchport")
interface_name_key = ET.SubElement(switchport, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_type = ET.SubElement(switchport, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_switchport_output_switchport_interface_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_switchport = ET.Element("get_interface_switchport")
config = get_interface_switchport
output = ET.SubElement(get_interface_switchport, "output")
switchport = ET.SubElement(output, "switchport")
interface_type_key = ET.SubElement(switchport, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name = ET.SubElement(switchport, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_switchport_output_switchport_mode(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_switchport = ET.Element("get_interface_switchport")
config = get_interface_switchport
output = ET.SubElement(get_interface_switchport, "output")
switchport = ET.SubElement(output, "switchport")
interface_type_key = ET.SubElement(switchport, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(switchport, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
mode = ET.SubElement(switchport, "mode")
mode.text = kwargs.pop('mode')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_switchport_output_switchport_fcoe_port_enabled(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_switchport = ET.Element("get_interface_switchport")
config = get_interface_switchport
output = ET.SubElement(get_interface_switchport, "output")
switchport = ET.SubElement(output, "switchport")
interface_type_key = ET.SubElement(switchport, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(switchport, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
fcoe_port_enabled = ET.SubElement(switchport, "fcoe-port-enabled")
fcoe_port_enabled.text = kwargs.pop('fcoe_port_enabled')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_switchport_output_switchport_ingress_filter_enabled(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_switchport = ET.Element("get_interface_switchport")
config = get_interface_switchport
output = ET.SubElement(get_interface_switchport, "output")
switchport = ET.SubElement(output, "switchport")
interface_type_key = ET.SubElement(switchport, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(switchport, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ingress_filter_enabled = ET.SubElement(switchport, "ingress-filter-enabled")
ingress_filter_enabled.text = kwargs.pop('ingress_filter_enabled')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_switchport_output_switchport_acceptable_frame_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_switchport = ET.Element("get_interface_switchport")
config = get_interface_switchport
output = ET.SubElement(get_interface_switchport, "output")
switchport = ET.SubElement(output, "switchport")
interface_type_key = ET.SubElement(switchport, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(switchport, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
acceptable_frame_type = ET.SubElement(switchport, "acceptable-frame-type")
acceptable_frame_type.text = kwargs.pop('acceptable_frame_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_switchport_output_switchport_default_vlan(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_switchport = ET.Element("get_interface_switchport")
config = get_interface_switchport
output = ET.SubElement(get_interface_switchport, "output")
switchport = ET.SubElement(output, "switchport")
interface_type_key = ET.SubElement(switchport, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(switchport, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
default_vlan = ET.SubElement(switchport, "default-vlan")
default_vlan.text = kwargs.pop('default_vlan')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_input_request_type_get_request_interface_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
input = ET.SubElement(get_ip_interface, "input")
request_type = ET.SubElement(input, "request-type")
get_request = ET.SubElement(request_type, "get-request")
interface_type = ET.SubElement(get_request, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_input_request_type_get_request_interface_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
input = ET.SubElement(get_ip_interface, "input")
request_type = ET.SubElement(input, "request-type")
get_request = ET.SubElement(request_type, "get-request")
interface_name = ET.SubElement(get_request, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_input_request_type_get_request_rbridge_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
input = ET.SubElement(get_ip_interface, "input")
request_type = ET.SubElement(input, "request-type")
get_request = ET.SubElement(request_type, "get-request")
rbridge_id = ET.SubElement(get_request, "rbridge-id")
rbridge_id.text = kwargs.pop('rbridge_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_interface_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_type = ET.SubElement(interface, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_interface_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name = ET.SubElement(interface, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_if_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
if_name = ET.SubElement(interface, "if-name")
if_name.text = kwargs.pop('if_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_ip_address_ipv4(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ip_address = ET.SubElement(interface, "ip-address")
ipv4 = ET.SubElement(ip_address, "ipv4")
ipv4.text = kwargs.pop('ipv4')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_ip_address_ipv4_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ip_address = ET.SubElement(interface, "ip-address")
ipv4_key = ET.SubElement(ip_address, "ipv4")
ipv4_key.text = kwargs.pop('ipv4')
ipv4_type = ET.SubElement(ip_address, "ipv4-type")
ipv4_type.text = kwargs.pop('ipv4_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_ip_address_broadcast(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ip_address = ET.SubElement(interface, "ip-address")
ipv4_key = ET.SubElement(ip_address, "ipv4")
ipv4_key.text = kwargs.pop('ipv4')
broadcast = ET.SubElement(ip_address, "broadcast")
broadcast.text = kwargs.pop('broadcast')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_ip_address_ip_mtu(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ip_address = ET.SubElement(interface, "ip-address")
ipv4_key = ET.SubElement(ip_address, "ipv4")
ipv4_key.text = kwargs.pop('ipv4')
ip_mtu = ET.SubElement(ip_address, "ip-mtu")
ip_mtu.text = kwargs.pop('ip_mtu')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_if_state(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
if_state = ET.SubElement(interface, "if-state")
if_state.text = kwargs.pop('if_state')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_line_protocol_state(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
line_protocol_state = ET.SubElement(interface, "line-protocol-state")
line_protocol_state.text = kwargs.pop('line_protocol_state')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_proxy_arp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
proxy_arp = ET.SubElement(interface, "proxy-arp")
proxy_arp.text = kwargs.pop('proxy_arp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_vrf(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
vrf = ET.SubElement(interface, "vrf")
vrf.text = kwargs.pop('vrf')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_has_more(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
has_more = ET.SubElement(output, "has-more")
has_more.text = kwargs.pop('has_more')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_input_request_type_get_request_interface_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
input = ET.SubElement(get_interface_detail, "input")
request_type = ET.SubElement(input, "request-type")
get_request = ET.SubElement(request_type, "get-request")
interface_type = ET.SubElement(get_request, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_input_request_type_get_request_interface_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
input = ET.SubElement(get_interface_detail, "input")
request_type = ET.SubElement(input, "request-type")
get_request = ET.SubElement(request_type, "get-request")
interface_name = ET.SubElement(get_request, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_input_request_type_get_next_request_last_rcvd_interface_interface_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
input = ET.SubElement(get_interface_detail, "input")
request_type = ET.SubElement(input, "request-type")
get_next_request = ET.SubElement(request_type, "get-next-request")
last_rcvd_interface = ET.SubElement(get_next_request, "last-rcvd-interface")
interface_type = ET.SubElement(last_rcvd_interface, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_input_request_type_get_next_request_last_rcvd_interface_interface_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
input = ET.SubElement(get_interface_detail, "input")
request_type = ET.SubElement(input, "request-type")
get_next_request = ET.SubElement(request_type, "get-next-request")
last_rcvd_interface = ET.SubElement(get_next_request, "last-rcvd-interface")
interface_name = ET.SubElement(last_rcvd_interface, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_interface_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_type = ET.SubElement(interface, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_interface_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name = ET.SubElement(interface, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ifindex(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ifindex = ET.SubElement(interface, "ifindex")
ifindex.text = kwargs.pop('ifindex')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_mtu(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
mtu = ET.SubElement(interface, "mtu")
mtu.text = kwargs.pop('mtu')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ip_mtu(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ip_mtu = ET.SubElement(interface, "ip-mtu")
ip_mtu.text = kwargs.pop('ip_mtu')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_if_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
if_name = ET.SubElement(interface, "if-name")
if_name.text = kwargs.pop('if_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_if_state(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
if_state = ET.SubElement(interface, "if-state")
if_state.text = kwargs.pop('if_state')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_line_protocol_state(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
line_protocol_state = ET.SubElement(interface, "line-protocol-state")
line_protocol_state.text = kwargs.pop('line_protocol_state')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_line_protocol_state_info(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
line_protocol_state_info = ET.SubElement(interface, "line-protocol-state-info")
line_protocol_state_info.text = kwargs.pop('line_protocol_state_info')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_line_protocol_exception_info(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
line_protocol_exception_info = ET.SubElement(interface, "line-protocol-exception-info")
line_protocol_exception_info.text = kwargs.pop('line_protocol_exception_info')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_hardware_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
hardware_type = ET.SubElement(interface, "hardware-type")
hardware_type.text = kwargs.pop('hardware_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_logical_hardware_address(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
logical_hardware_address = ET.SubElement(interface, "logical-hardware-address")
logical_hardware_address.text = kwargs.pop('logical_hardware_address')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_current_hardware_address(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
current_hardware_address = ET.SubElement(interface, "current-hardware-address")
current_hardware_address.text = kwargs.pop('current_hardware_address')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_media_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
media_type = ET.SubElement(interface, "media-type")
media_type.text = kwargs.pop('media_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_wavelength(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
wavelength = ET.SubElement(interface, "wavelength")
wavelength.text = kwargs.pop('wavelength')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_if_description(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
if_description = ET.SubElement(interface, "if-description")
if_description.text = kwargs.pop('if_description')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_actual_line_speed(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
actual_line_speed = ET.SubElement(interface, "actual-line-speed")
actual_line_speed.text = kwargs.pop('actual_line_speed')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_configured_line_speed(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
configured_line_speed = ET.SubElement(interface, "configured-line-speed")
configured_line_speed.text = kwargs.pop('configured_line_speed')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_line_duplex_state(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
line_duplex_state = ET.SubElement(interface, "line-duplex-state")
line_duplex_state.text = kwargs.pop('line_duplex_state')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_flow_control(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
flow_control = ET.SubElement(interface, "flow-control")
flow_control.text = kwargs.pop('flow_control')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_queuing_strategy(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
queuing_strategy = ET.SubElement(interface, "queuing-strategy")
queuing_strategy.text = kwargs.pop('queuing_strategy')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_port_role(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
port_role = ET.SubElement(interface, "port-role")
port_role.text = kwargs.pop('port_role')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_port_mode(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
port_mode = ET.SubElement(interface, "port-mode")
port_mode.text = kwargs.pop('port_mode')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ifHCInOctets(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ifHCInOctets = ET.SubElement(interface, "ifHCInOctets")
ifHCInOctets.text = kwargs.pop('ifHCInOctets')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ifHCInUcastPkts(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ifHCInUcastPkts = ET.SubElement(interface, "ifHCInUcastPkts")
ifHCInUcastPkts.text = kwargs.pop('ifHCInUcastPkts')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ifHCInMulticastPkts(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ifHCInMulticastPkts = ET.SubElement(interface, "ifHCInMulticastPkts")
ifHCInMulticastPkts.text = kwargs.pop('ifHCInMulticastPkts')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ifHCInBroadcastPkts(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ifHCInBroadcastPkts = ET.SubElement(interface, "ifHCInBroadcastPkts")
ifHCInBroadcastPkts.text = kwargs.pop('ifHCInBroadcastPkts')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ifHCInErrors(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ifHCInErrors = ET.SubElement(interface, "ifHCInErrors")
ifHCInErrors.text = kwargs.pop('ifHCInErrors')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ifHCOutOctets(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ifHCOutOctets = ET.SubElement(interface, "ifHCOutOctets")
ifHCOutOctets.text = kwargs.pop('ifHCOutOctets')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ifHCOutUcastPkts(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ifHCOutUcastPkts = ET.SubElement(interface, "ifHCOutUcastPkts")
ifHCOutUcastPkts.text = kwargs.pop('ifHCOutUcastPkts')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ifHCOutMulticastPkts(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ifHCOutMulticastPkts = ET.SubElement(interface, "ifHCOutMulticastPkts")
ifHCOutMulticastPkts.text = kwargs.pop('ifHCOutMulticastPkts')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ifHCOutBroadcastPkts(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ifHCOutBroadcastPkts = ET.SubElement(interface, "ifHCOutBroadcastPkts")
ifHCOutBroadcastPkts.text = kwargs.pop('ifHCOutBroadcastPkts')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ifHCOutErrors(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ifHCOutErrors = ET.SubElement(interface, "ifHCOutErrors")
ifHCOutErrors.text = kwargs.pop('ifHCOutErrors')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_has_more(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
has_more = ET.SubElement(output, "has-more")
has_more.text = kwargs.pop('has_more')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_input_interface_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
input = ET.SubElement(get_media_detail, "input")
interface_type = ET.SubElement(input, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_input_interface_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
input = ET.SubElement(get_media_detail, "input")
interface_name = ET.SubElement(input, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_input_rbridge_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
input = ET.SubElement(get_media_detail, "input")
rbridge_id = ET.SubElement(input, "rbridge-id")
rbridge_id.text = kwargs.pop('rbridge_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_type = ET.SubElement(interface, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name = ET.SubElement(interface, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_speed(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
sfp = ET.SubElement(interface_identifier, "sfp")
sfp = ET.SubElement(sfp, "sfp")
speed = ET.SubElement(sfp, "speed")
speed.text = kwargs.pop('speed')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_connector(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
sfp = ET.SubElement(interface_identifier, "sfp")
sfp = ET.SubElement(sfp, "sfp")
connector = ET.SubElement(sfp, "connector")
connector.text = kwargs.pop('connector')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_encoding(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
sfp = ET.SubElement(interface_identifier, "sfp")
sfp = ET.SubElement(sfp, "sfp")
encoding = ET.SubElement(sfp, "encoding")
encoding.text = kwargs.pop('encoding')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_vendor_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
sfp = ET.SubElement(interface_identifier, "sfp")
sfp = ET.SubElement(sfp, "sfp")
vendor_name = ET.SubElement(sfp, "vendor-name")
vendor_name.text = kwargs.pop('vendor_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_vendor_oui(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
sfp = ET.SubElement(interface_identifier, "sfp")
sfp = ET.SubElement(sfp, "sfp")
vendor_oui = ET.SubElement(sfp, "vendor-oui")
vendor_oui.text = kwargs.pop('vendor_oui')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_vendor_pn(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
sfp = ET.SubElement(interface_identifier, "sfp")
sfp = ET.SubElement(sfp, "sfp")
vendor_pn = ET.SubElement(sfp, "vendor-pn")
vendor_pn.text = kwargs.pop('vendor_pn')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_vendor_rev(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
sfp = ET.SubElement(interface_identifier, "sfp")
sfp = ET.SubElement(sfp, "sfp")
vendor_rev = ET.SubElement(sfp, "vendor-rev")
vendor_rev.text = kwargs.pop('vendor_rev')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_distance(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
sfp = ET.SubElement(interface_identifier, "sfp")
sfp = ET.SubElement(sfp, "sfp")
distance = ET.SubElement(sfp, "distance")
distance.text = kwargs.pop('distance')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_media_form_factor(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
sfp = ET.SubElement(interface_identifier, "sfp")
sfp = ET.SubElement(sfp, "sfp")
media_form_factor = ET.SubElement(sfp, "media-form-factor")
media_form_factor.text = kwargs.pop('media_form_factor')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_wavelength(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
sfp = ET.SubElement(interface_identifier, "sfp")
sfp = ET.SubElement(sfp, "sfp")
wavelength = ET.SubElement(sfp, "wavelength")
wavelength.text = kwargs.pop('wavelength')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_serial_no(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
sfp = ET.SubElement(interface_identifier, "sfp")
sfp = ET.SubElement(sfp, "sfp")
serial_no = ET.SubElement(sfp, "serial-no")
serial_no.text = kwargs.pop('serial_no')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_date_code(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
sfp = ET.SubElement(interface_identifier, "sfp")
sfp = ET.SubElement(sfp, "sfp")
date_code = ET.SubElement(sfp, "date-code")
date_code.text = kwargs.pop('date_code')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_temperature(self, **kwargs):
    """Build a get-media-detail RPC selecting .../sfp/sfp/temperature.

    Required kwargs: interface_type, interface_name, temperature.
    Optional kwargs: callback (defaults to self._callback).
    Returns the callback's result on the built element tree.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    sfp = ET.SubElement(interface_identifier, "sfp")
    sfp = ET.SubElement(sfp, "sfp")
    temperature = ET.SubElement(sfp, "temperature")
    temperature.text = kwargs.pop('temperature')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_voltage(self, **kwargs):
    """Build a get-media-detail RPC selecting .../sfp/sfp/voltage.

    Required kwargs: interface_type, interface_name, voltage.
    Optional kwargs: callback (defaults to self._callback).
    Returns the callback's result on the built element tree.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    sfp = ET.SubElement(interface_identifier, "sfp")
    sfp = ET.SubElement(sfp, "sfp")
    voltage = ET.SubElement(sfp, "voltage")
    voltage.text = kwargs.pop('voltage')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_current(self, **kwargs):
    """Build a get-media-detail RPC selecting .../sfp/sfp/current.

    Required kwargs: interface_type, interface_name, current.
    Optional kwargs: callback (defaults to self._callback).
    Returns the callback's result on the built element tree.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    sfp = ET.SubElement(interface_identifier, "sfp")
    sfp = ET.SubElement(sfp, "sfp")
    current = ET.SubElement(sfp, "current")
    current.text = kwargs.pop('current')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_tx_power(self, **kwargs):
    """Build a get-media-detail RPC selecting .../sfp/sfp/tx-power.

    Required kwargs: interface_type, interface_name, tx_power.
    Optional kwargs: callback (defaults to self._callback).
    Returns the callback's result on the built element tree.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    sfp = ET.SubElement(interface_identifier, "sfp")
    sfp = ET.SubElement(sfp, "sfp")
    tx_power = ET.SubElement(sfp, "tx-power")
    tx_power.text = kwargs.pop('tx_power')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_rx_power(self, **kwargs):
    """Build a get-media-detail RPC selecting .../sfp/sfp/rx-power.

    Required kwargs: interface_type, interface_name, rx_power.
    Optional kwargs: callback (defaults to self._callback).
    Returns the callback's result on the built element tree.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    sfp = ET.SubElement(interface_identifier, "sfp")
    sfp = ET.SubElement(sfp, "sfp")
    rx_power = ET.SubElement(sfp, "rx-power")
    rx_power.text = kwargs.pop('rx_power')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_on_board_on_board_speed(self, **kwargs):
    """Build a get-media-detail RPC selecting .../on-board/on-board/speed.

    Required kwargs: interface_type, interface_name, speed.
    Optional kwargs: callback (defaults to self._callback).
    Returns the callback's result on the built element tree.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    on_board = ET.SubElement(interface_identifier, "on-board")
    on_board = ET.SubElement(on_board, "on-board")
    speed = ET.SubElement(on_board, "speed")
    speed.text = kwargs.pop('speed')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_on_board_on_board_connector(self, **kwargs):
    """Build a get-media-detail RPC selecting .../on-board/on-board/connector.

    Required kwargs: interface_type, interface_name, connector.
    Optional kwargs: callback (defaults to self._callback).
    Returns the callback's result on the built element tree.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    on_board = ET.SubElement(interface_identifier, "on-board")
    on_board = ET.SubElement(on_board, "on-board")
    connector = ET.SubElement(on_board, "connector")
    connector.text = kwargs.pop('connector')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_on_board_on_board_encoding(self, **kwargs):
    """Build a get-media-detail RPC selecting .../on-board/on-board/encoding.

    Required kwargs: interface_type, interface_name, encoding.
    Optional kwargs: callback (defaults to self._callback).
    Returns the callback's result on the built element tree.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    on_board = ET.SubElement(interface_identifier, "on-board")
    on_board = ET.SubElement(on_board, "on-board")
    encoding = ET.SubElement(on_board, "encoding")
    encoding.text = kwargs.pop('encoding')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_on_board_on_board_vendor_name(self, **kwargs):
    """Build a get-media-detail RPC selecting .../on-board/on-board/vendor-name.

    Required kwargs: interface_type, interface_name, vendor_name.
    Optional kwargs: callback (defaults to self._callback).
    Returns the callback's result on the built element tree.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    on_board = ET.SubElement(interface_identifier, "on-board")
    on_board = ET.SubElement(on_board, "on-board")
    vendor_name = ET.SubElement(on_board, "vendor-name")
    vendor_name.text = kwargs.pop('vendor_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_on_board_on_board_vendor_oui(self, **kwargs):
    """Build a get-media-detail RPC selecting .../on-board/on-board/vendor-oui.

    Required kwargs: interface_type, interface_name, vendor_oui.
    Optional kwargs: callback (defaults to self._callback).
    Returns the callback's result on the built element tree.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    on_board = ET.SubElement(interface_identifier, "on-board")
    on_board = ET.SubElement(on_board, "on-board")
    vendor_oui = ET.SubElement(on_board, "vendor-oui")
    vendor_oui.text = kwargs.pop('vendor_oui')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_on_board_on_board_vendor_pn(self, **kwargs):
    """Build a get-media-detail RPC selecting .../on-board/on-board/vendor-pn.

    Required kwargs: interface_type, interface_name, vendor_pn.
    Optional kwargs: callback (defaults to self._callback).
    Returns the callback's result on the built element tree.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    on_board = ET.SubElement(interface_identifier, "on-board")
    on_board = ET.SubElement(on_board, "on-board")
    vendor_pn = ET.SubElement(on_board, "vendor-pn")
    vendor_pn.text = kwargs.pop('vendor_pn')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_on_board_on_board_vendor_rev(self, **kwargs):
    """Build a get-media-detail RPC selecting .../on-board/on-board/vendor-rev.

    Required kwargs: interface_type, interface_name, vendor_rev.
    Optional kwargs: callback (defaults to self._callback).
    Returns the callback's result on the built element tree.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    on_board = ET.SubElement(interface_identifier, "on-board")
    on_board = ET.SubElement(on_board, "on-board")
    vendor_rev = ET.SubElement(on_board, "vendor-rev")
    vendor_rev.text = kwargs.pop('vendor_rev')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_gbic_gbc_vendor_name(self, **kwargs):
    """Build a get-media-detail RPC selecting .../gbic/gbc/vendor-name.

    Required kwargs: interface_type, interface_name, vendor_name.
    Optional kwargs: callback (defaults to self._callback).
    Returns the callback's result on the built element tree.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    gbic = ET.SubElement(interface_identifier, "gbic")
    gbc = ET.SubElement(gbic, "gbc")
    vendor_name = ET.SubElement(gbc, "vendor-name")
    vendor_name.text = kwargs.pop('vendor_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_gbic_gbc_vendor_oui(self, **kwargs):
    """Build a get-media-detail RPC selecting .../gbic/gbc/vendor-oui.

    Required kwargs: interface_type, interface_name, vendor_oui.
    Optional kwargs: callback (defaults to self._callback).
    Returns the callback's result on the built element tree.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    gbic = ET.SubElement(interface_identifier, "gbic")
    gbc = ET.SubElement(gbic, "gbc")
    vendor_oui = ET.SubElement(gbc, "vendor-oui")
    vendor_oui.text = kwargs.pop('vendor_oui')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_gbic_gbc_vendor_pn(self, **kwargs):
    """Build a get-media-detail RPC selecting .../gbic/gbc/vendor-pn.

    Required kwargs: interface_type, interface_name, vendor_pn.
    Optional kwargs: callback (defaults to self._callback).
    Returns the callback's result on the built element tree.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    gbic = ET.SubElement(interface_identifier, "gbic")
    gbc = ET.SubElement(gbic, "gbc")
    vendor_pn = ET.SubElement(gbc, "vendor-pn")
    vendor_pn.text = kwargs.pop('vendor_pn')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_gbic_gbc_vendor_rev(self, **kwargs):
    """Build a get-media-detail RPC selecting .../gbic/gbc/vendor-rev.

    Required kwargs: interface_type, interface_name, vendor_rev.
    Optional kwargs: callback (defaults to self._callback).
    Returns the callback's result on the built element tree.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    gbic = ET.SubElement(interface_identifier, "gbic")
    gbc = ET.SubElement(gbic, "gbc")
    vendor_rev = ET.SubElement(gbc, "vendor-rev")
    vendor_rev.text = kwargs.pop('vendor_rev')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xfp_xfp_vendor_name(self, **kwargs):
    """Build a get-media-detail RPC selecting .../xfp/xfp/vendor-name.

    Required kwargs: interface_type, interface_name, vendor_name.
    Optional kwargs: callback (defaults to self._callback).
    Returns the callback's result on the built element tree.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    xfp = ET.SubElement(interface_identifier, "xfp")
    xfp = ET.SubElement(xfp, "xfp")
    vendor_name = ET.SubElement(xfp, "vendor-name")
    vendor_name.text = kwargs.pop('vendor_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xfp_xfp_vendor_oui(self, **kwargs):
    """Build a get-media-detail RPC selecting .../xfp/xfp/vendor-oui.

    Required kwargs: interface_type, interface_name, vendor_oui.
    Optional kwargs: callback (defaults to self._callback).
    Returns the callback's result on the built element tree.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    xfp = ET.SubElement(interface_identifier, "xfp")
    xfp = ET.SubElement(xfp, "xfp")
    vendor_oui = ET.SubElement(xfp, "vendor-oui")
    vendor_oui.text = kwargs.pop('vendor_oui')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xfp_xfp_vendor_pn(self, **kwargs):
    """Build a get-media-detail RPC selecting .../xfp/xfp/vendor-pn.

    Required kwargs: interface_type, interface_name, vendor_pn.
    Optional kwargs: callback (defaults to self._callback).
    Returns the callback's result on the built element tree.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    xfp = ET.SubElement(interface_identifier, "xfp")
    xfp = ET.SubElement(xfp, "xfp")
    vendor_pn = ET.SubElement(xfp, "vendor-pn")
    vendor_pn.text = kwargs.pop('vendor_pn')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xfp_xfp_vendor_rev(self, **kwargs):
    """Build a get-media-detail RPC selecting .../xfp/xfp/vendor-rev.

    Required kwargs: interface_type, interface_name, vendor_rev.
    Optional kwargs: callback (defaults to self._callback).
    Returns the callback's result on the built element tree.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    xfp = ET.SubElement(interface_identifier, "xfp")
    xfp = ET.SubElement(xfp, "xfp")
    vendor_rev = ET.SubElement(xfp, "vendor-rev")
    vendor_rev.text = kwargs.pop('vendor_rev')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xff_xff_vendor_name(self, **kwargs):
    """Build a get-media-detail RPC selecting .../xff/xff/vendor-name.

    Required kwargs: interface_type, interface_name, vendor_name.
    Optional kwargs: callback (defaults to self._callback).
    Returns the callback's result on the built element tree.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    xff = ET.SubElement(interface_identifier, "xff")
    xff = ET.SubElement(xff, "xff")
    vendor_name = ET.SubElement(xff, "vendor-name")
    vendor_name.text = kwargs.pop('vendor_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xff_xff_vendor_oui(self, **kwargs):
    """Build a get-media-detail RPC selecting .../xff/xff/vendor-oui.

    Required kwargs: interface_type, interface_name, vendor_oui.
    Optional kwargs: callback (defaults to self._callback).
    Returns the callback's result on the built element tree.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    xff = ET.SubElement(interface_identifier, "xff")
    xff = ET.SubElement(xff, "xff")
    vendor_oui = ET.SubElement(xff, "vendor-oui")
    vendor_oui.text = kwargs.pop('vendor_oui')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xff_xff_vendor_pn(self, **kwargs):
    """Build a get-media-detail RPC selecting .../xff/xff/vendor-pn.

    Required kwargs: interface_type, interface_name, vendor_pn.
    Optional kwargs: callback (defaults to self._callback).
    Returns the callback's result on the built element tree.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    xff = ET.SubElement(interface_identifier, "xff")
    xff = ET.SubElement(xff, "xff")
    vendor_pn = ET.SubElement(xff, "vendor-pn")
    vendor_pn.text = kwargs.pop('vendor_pn')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xff_xff_vendor_rev(self, **kwargs):
    """Build a get-media-detail RPC selecting .../xff/xff/vendor-rev.

    Required kwargs: interface_type, interface_name, vendor_rev.
    Optional kwargs: callback (defaults to self._callback).
    Returns the callback's result on the built element tree.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    xff = ET.SubElement(interface_identifier, "xff")
    xff = ET.SubElement(xff, "xff")
    vendor_rev = ET.SubElement(xff, "vendor-rev")
    vendor_rev.text = kwargs.pop('vendor_rev')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xfpe_xfpe_vendor_name(self, **kwargs):
    """Build a get-media-detail RPC selecting .../xfpe/xfpe/vendor-name.

    Required kwargs: interface_type, interface_name, vendor_name.
    Optional kwargs: callback (defaults to self._callback).
    Returns the callback's result on the built element tree.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    xfpe = ET.SubElement(interface_identifier, "xfpe")
    xfpe = ET.SubElement(xfpe, "xfpe")
    vendor_name = ET.SubElement(xfpe, "vendor-name")
    vendor_name.text = kwargs.pop('vendor_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xfpe_xfpe_vendor_oui(self, **kwargs):
    """Build a get-media-detail RPC selecting .../xfpe/xfpe/vendor-oui.

    Required kwargs: interface_type, interface_name, vendor_oui.
    Optional kwargs: callback (defaults to self._callback).
    Returns the callback's result on the built element tree.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    xfpe = ET.SubElement(interface_identifier, "xfpe")
    xfpe = ET.SubElement(xfpe, "xfpe")
    vendor_oui = ET.SubElement(xfpe, "vendor-oui")
    vendor_oui.text = kwargs.pop('vendor_oui')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xfpe_xfpe_vendor_pn(self, **kwargs):
    """Build a get-media-detail RPC selecting .../xfpe/xfpe/vendor-pn.

    Required kwargs: interface_type, interface_name, vendor_pn.
    Optional kwargs: callback (defaults to self._callback).
    Returns the callback's result on the built element tree.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    xfpe = ET.SubElement(interface_identifier, "xfpe")
    xfpe = ET.SubElement(xfpe, "xfpe")
    vendor_pn = ET.SubElement(xfpe, "vendor-pn")
    vendor_pn.text = kwargs.pop('vendor_pn')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xfpe_xfpe_vendor_rev(self, **kwargs):
    """Build a get-media-detail RPC selecting .../xfpe/xfpe/vendor-rev.

    Required kwargs: interface_type, interface_name, vendor_rev.
    Optional kwargs: callback (defaults to self._callback).
    Returns the callback's result on the built element tree.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    xfpe = ET.SubElement(interface_identifier, "xfpe")
    xfpe = ET.SubElement(xfpe, "xfpe")
    vendor_rev = ET.SubElement(xfpe, "vendor-rev")
    vendor_rev.text = kwargs.pop('vendor_rev')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_unknown_unknown_vendor_name(self, **kwargs):
    """Build a get-media-detail RPC selecting .../unknown/unknown/vendor-name.

    Required kwargs: interface_type, interface_name, vendor_name.
    Optional kwargs: callback (defaults to self._callback).
    Returns the callback's result on the built element tree.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    unknown = ET.SubElement(interface_identifier, "unknown")
    unknown = ET.SubElement(unknown, "unknown")
    vendor_name = ET.SubElement(unknown, "vendor-name")
    vendor_name.text = kwargs.pop('vendor_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_unknown_unknown_vendor_oui(self, **kwargs):
    """Build a get-media-detail RPC selecting .../unknown/unknown/vendor-oui.

    Required kwargs: interface_type, interface_name, vendor_oui.
    Optional kwargs: callback (defaults to self._callback).
    Returns the callback's result on the built element tree.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    unknown = ET.SubElement(interface_identifier, "unknown")
    unknown = ET.SubElement(unknown, "unknown")
    vendor_oui = ET.SubElement(unknown, "vendor-oui")
    vendor_oui.text = kwargs.pop('vendor_oui')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_unknown_unknown_vendor_pn(self, **kwargs):
    """Build a get-media-detail RPC selecting .../unknown/unknown/vendor-pn.

    Required kwargs: interface_type, interface_name, vendor_pn.
    Optional kwargs: callback (defaults to self._callback).
    Returns the callback's result on the built element tree.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    unknown = ET.SubElement(interface_identifier, "unknown")
    unknown = ET.SubElement(unknown, "unknown")
    vendor_pn = ET.SubElement(unknown, "vendor-pn")
    vendor_pn.text = kwargs.pop('vendor_pn')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_unknown_unknown_vendor_rev(self, **kwargs):
    """Build a get-media-detail RPC selecting .../unknown/unknown/vendor-rev.

    Required kwargs: interface_type, interface_name, vendor_rev.
    Optional kwargs: callback (defaults to self._callback).
    Returns the callback's result on the built element tree.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    unknown = ET.SubElement(interface_identifier, "unknown")
    unknown = ET.SubElement(unknown, "unknown")
    vendor_rev = ET.SubElement(unknown, "vendor-rev")
    vendor_rev.text = kwargs.pop('vendor_rev')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_speed(self, **kwargs):
    """Build a get-media-detail RPC selecting .../qsfp/qsfp/speed.

    Required kwargs: interface_type, interface_name, speed.
    Optional kwargs: callback (defaults to self._callback).
    Returns the callback's result on the built element tree.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")
    speed = ET.SubElement(qsfp, "speed")
    speed.text = kwargs.pop('speed')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_connector(self, **kwargs):
    """Build a get-media-detail RPC selecting .../qsfp/qsfp/connector.

    Required kwargs: interface_type, interface_name, connector.
    Optional kwargs: callback (defaults to self._callback).
    Returns the callback's result on the built element tree.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")
    connector = ET.SubElement(qsfp, "connector")
    connector.text = kwargs.pop('connector')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_encoding(self, **kwargs):
    """Build a get-media-detail RPC selecting .../qsfp/qsfp/encoding.

    Required kwargs: interface_type, interface_name, encoding.
    Optional kwargs: callback (defaults to self._callback).
    Returns the callback's result on the built element tree.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")
    encoding = ET.SubElement(qsfp, "encoding")
    encoding.text = kwargs.pop('encoding')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_vendor_name(self, **kwargs):
    """Build a get-media-detail RPC selecting .../qsfp/qsfp/vendor-name.

    Required kwargs: interface_type, interface_name, vendor_name.
    Optional kwargs: callback (defaults to self._callback).
    Returns the callback's result on the built element tree.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")
    vendor_name = ET.SubElement(qsfp, "vendor-name")
    vendor_name.text = kwargs.pop('vendor_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_vendor_oui(self, **kwargs):
    """Build a get-media-detail RPC selecting .../qsfp/qsfp/vendor-oui.

    Required kwargs: interface_type, interface_name, vendor_oui.
    Optional kwargs: callback (defaults to self._callback).
    Returns the callback's result on the built element tree.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")
    vendor_oui = ET.SubElement(qsfp, "vendor-oui")
    vendor_oui.text = kwargs.pop('vendor_oui')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_vendor_pn(self, **kwargs):
    """Auto Generated Code

    Build a get_media_detail request whose <interface> is keyed by
    kwargs 'interface_type'/'interface_name', set the qsfp 'vendor-pn'
    leaf from kwargs, and pass the tree to the callback (kwargs
    'callback', defaulting to self._callback).
    """
    # Root of the RPC body; the generator's unused ET.Element("config") was dropped.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")  # nested <qsfp>/<qsfp> container
    vendor_pn = ET.SubElement(qsfp, "vendor-pn")
    vendor_pn.text = kwargs.pop('vendor_pn')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_vendor_rev(self, **kwargs):
    """Auto Generated Code

    Build a get_media_detail request whose <interface> is keyed by
    kwargs 'interface_type'/'interface_name', set the qsfp
    'vendor-rev' leaf from kwargs, and pass the tree to the callback
    (kwargs 'callback', defaulting to self._callback).
    """
    # Root of the RPC body; the generator's unused ET.Element("config") was dropped.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")  # nested <qsfp>/<qsfp> container
    vendor_rev = ET.SubElement(qsfp, "vendor-rev")
    vendor_rev.text = kwargs.pop('vendor_rev')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_distance(self, **kwargs):
    """Auto Generated Code

    Build a get_media_detail request whose <interface> is keyed by
    kwargs 'interface_type'/'interface_name', set the qsfp 'distance'
    leaf from kwargs, and pass the tree to the callback (kwargs
    'callback', defaulting to self._callback).
    """
    # Root of the RPC body; the generator's unused ET.Element("config") was dropped.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")  # nested <qsfp>/<qsfp> container
    distance = ET.SubElement(qsfp, "distance")
    distance.text = kwargs.pop('distance')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_media_form_factor(self, **kwargs):
    """Auto Generated Code

    Build a get_media_detail request whose <interface> is keyed by
    kwargs 'interface_type'/'interface_name', set the qsfp
    'media-form-factor' leaf from kwargs, and pass the tree to the
    callback (kwargs 'callback', defaulting to self._callback).
    """
    # Root of the RPC body; the generator's unused ET.Element("config") was dropped.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")  # nested <qsfp>/<qsfp> container
    media_form_factor = ET.SubElement(qsfp, "media-form-factor")
    media_form_factor.text = kwargs.pop('media_form_factor')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_wavelength(self, **kwargs):
    """Auto Generated Code

    Build a get_media_detail request whose <interface> is keyed by
    kwargs 'interface_type'/'interface_name', set the qsfp
    'wavelength' leaf from kwargs, and pass the tree to the callback
    (kwargs 'callback', defaulting to self._callback).
    """
    # Root of the RPC body; the generator's unused ET.Element("config") was dropped.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")  # nested <qsfp>/<qsfp> container
    wavelength = ET.SubElement(qsfp, "wavelength")
    wavelength.text = kwargs.pop('wavelength')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_serial_no(self, **kwargs):
    """Auto Generated Code

    Build a get_media_detail request whose <interface> is keyed by
    kwargs 'interface_type'/'interface_name', set the qsfp 'serial-no'
    leaf from kwargs, and pass the tree to the callback (kwargs
    'callback', defaulting to self._callback).
    """
    # Root of the RPC body; the generator's unused ET.Element("config") was dropped.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")  # nested <qsfp>/<qsfp> container
    serial_no = ET.SubElement(qsfp, "serial-no")
    serial_no.text = kwargs.pop('serial_no')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_date_code(self, **kwargs):
    """Auto Generated Code

    Build a get_media_detail request whose <interface> is keyed by
    kwargs 'interface_type'/'interface_name', set the qsfp 'date-code'
    leaf from kwargs, and pass the tree to the callback (kwargs
    'callback', defaulting to self._callback).
    """
    # Root of the RPC body; the generator's unused ET.Element("config") was dropped.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")  # nested <qsfp>/<qsfp> container
    date_code = ET.SubElement(qsfp, "date-code")
    date_code.text = kwargs.pop('date_code')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_temperature(self, **kwargs):
    """Auto Generated Code

    Build a get_media_detail request whose <interface> is keyed by
    kwargs 'interface_type'/'interface_name', set the qsfp
    'temperature' leaf from kwargs, and pass the tree to the callback
    (kwargs 'callback', defaulting to self._callback).
    """
    # Root of the RPC body; the generator's unused ET.Element("config") was dropped.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")  # nested <qsfp>/<qsfp> container
    temperature = ET.SubElement(qsfp, "temperature")
    temperature.text = kwargs.pop('temperature')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_voltage(self, **kwargs):
    """Auto Generated Code

    Build a get_media_detail request whose <interface> is keyed by
    kwargs 'interface_type'/'interface_name', set the qsfp 'voltage'
    leaf from kwargs, and pass the tree to the callback (kwargs
    'callback', defaulting to self._callback).
    """
    # Root of the RPC body; the generator's unused ET.Element("config") was dropped.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")  # nested <qsfp>/<qsfp> container
    voltage = ET.SubElement(qsfp, "voltage")
    voltage.text = kwargs.pop('voltage')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_current(self, **kwargs):
    """Auto Generated Code

    Build a get_media_detail request whose <interface> is keyed by
    kwargs 'interface_type'/'interface_name', set the qsfp 'current'
    leaf from kwargs, and pass the tree to the callback (kwargs
    'callback', defaulting to self._callback).
    """
    # Root of the RPC body; the generator's unused ET.Element("config") was dropped.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")  # nested <qsfp>/<qsfp> container
    current = ET.SubElement(qsfp, "current")
    current.text = kwargs.pop('current')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_tx_power(self, **kwargs):
    """Auto Generated Code

    Build a get_media_detail request whose <interface> is keyed by
    kwargs 'interface_type'/'interface_name', set the qsfp 'tx-power'
    leaf from kwargs, and pass the tree to the callback (kwargs
    'callback', defaulting to self._callback).
    """
    # Root of the RPC body; the generator's unused ET.Element("config") was dropped.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")  # nested <qsfp>/<qsfp> container
    tx_power = ET.SubElement(qsfp, "tx-power")
    tx_power.text = kwargs.pop('tx_power')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_rx_power(self, **kwargs):
    """Auto Generated Code

    Build a get_media_detail request whose <interface> is keyed by
    kwargs 'interface_type'/'interface_name', set the qsfp 'rx-power'
    leaf from kwargs, and pass the tree to the callback (kwargs
    'callback', defaulting to self._callback).
    """
    # Root of the RPC body; the generator's unused ET.Element("config") was dropped.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")  # nested <qsfp>/<qsfp> container
    rx_power = ET.SubElement(qsfp, "rx-power")
    rx_power.text = kwargs.pop('rx_power')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_speed(self, **kwargs):
    """Auto Generated Code

    Build a get_media_detail request whose <interface> is keyed by
    kwargs 'interface_type'/'interface_name', set the qsfpp 'speed'
    leaf from kwargs, and pass the tree to the callback (kwargs
    'callback', defaulting to self._callback).
    """
    # Root of the RPC body; the generator's unused ET.Element("config") was dropped.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfpp = ET.SubElement(interface_identifier, "qsfpp")
    qsfpp = ET.SubElement(qsfpp, "qsfpp")  # nested <qsfpp>/<qsfpp> container
    speed = ET.SubElement(qsfpp, "speed")
    speed.text = kwargs.pop('speed')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_connector(self, **kwargs):
    """Auto Generated Code

    Build a get_media_detail request whose <interface> is keyed by
    kwargs 'interface_type'/'interface_name', set the qsfpp
    'connector' leaf from kwargs, and pass the tree to the callback
    (kwargs 'callback', defaulting to self._callback).
    """
    # Root of the RPC body; the generator's unused ET.Element("config") was dropped.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfpp = ET.SubElement(interface_identifier, "qsfpp")
    qsfpp = ET.SubElement(qsfpp, "qsfpp")  # nested <qsfpp>/<qsfpp> container
    connector = ET.SubElement(qsfpp, "connector")
    connector.text = kwargs.pop('connector')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_encoding(self, **kwargs):
    """Auto Generated Code

    Build a get_media_detail request whose <interface> is keyed by
    kwargs 'interface_type'/'interface_name', set the qsfpp 'encoding'
    leaf from kwargs, and pass the tree to the callback (kwargs
    'callback', defaulting to self._callback).
    """
    # Root of the RPC body; the generator's unused ET.Element("config") was dropped.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfpp = ET.SubElement(interface_identifier, "qsfpp")
    qsfpp = ET.SubElement(qsfpp, "qsfpp")  # nested <qsfpp>/<qsfpp> container
    encoding = ET.SubElement(qsfpp, "encoding")
    encoding.text = kwargs.pop('encoding')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_vendor_name(self, **kwargs):
    """Auto Generated Code

    Build a get_media_detail request whose <interface> is keyed by
    kwargs 'interface_type'/'interface_name', set the qsfpp
    'vendor-name' leaf from kwargs, and pass the tree to the callback
    (kwargs 'callback', defaulting to self._callback).
    """
    # Root of the RPC body; the generator's unused ET.Element("config") was dropped.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfpp = ET.SubElement(interface_identifier, "qsfpp")
    qsfpp = ET.SubElement(qsfpp, "qsfpp")  # nested <qsfpp>/<qsfpp> container
    vendor_name = ET.SubElement(qsfpp, "vendor-name")
    vendor_name.text = kwargs.pop('vendor_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_vendor_oui(self, **kwargs):
    """Auto Generated Code

    Build a get_media_detail request whose <interface> is keyed by
    kwargs 'interface_type'/'interface_name', set the qsfpp
    'vendor-oui' leaf from kwargs, and pass the tree to the callback
    (kwargs 'callback', defaulting to self._callback).
    """
    # Root of the RPC body; the generator's unused ET.Element("config") was dropped.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfpp = ET.SubElement(interface_identifier, "qsfpp")
    qsfpp = ET.SubElement(qsfpp, "qsfpp")  # nested <qsfpp>/<qsfpp> container
    vendor_oui = ET.SubElement(qsfpp, "vendor-oui")
    vendor_oui.text = kwargs.pop('vendor_oui')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_vendor_pn(self, **kwargs):
    """Auto Generated Code

    Build a get_media_detail request whose <interface> is keyed by
    kwargs 'interface_type'/'interface_name', set the qsfpp
    'vendor-pn' leaf from kwargs, and pass the tree to the callback
    (kwargs 'callback', defaulting to self._callback).
    """
    # Root of the RPC body; the generator's unused ET.Element("config") was dropped.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfpp = ET.SubElement(interface_identifier, "qsfpp")
    qsfpp = ET.SubElement(qsfpp, "qsfpp")  # nested <qsfpp>/<qsfpp> container
    vendor_pn = ET.SubElement(qsfpp, "vendor-pn")
    vendor_pn.text = kwargs.pop('vendor_pn')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_vendor_rev(self, **kwargs):
    """Auto Generated Code

    Build a get_media_detail request whose <interface> is keyed by
    kwargs 'interface_type'/'interface_name', set the qsfpp
    'vendor-rev' leaf from kwargs, and pass the tree to the callback
    (kwargs 'callback', defaulting to self._callback).
    """
    # Root of the RPC body; the generator's unused ET.Element("config") was dropped.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfpp = ET.SubElement(interface_identifier, "qsfpp")
    qsfpp = ET.SubElement(qsfpp, "qsfpp")  # nested <qsfpp>/<qsfpp> container
    vendor_rev = ET.SubElement(qsfpp, "vendor-rev")
    vendor_rev.text = kwargs.pop('vendor_rev')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_distance(self, **kwargs):
    """Auto Generated Code

    Build a get_media_detail request whose <interface> is keyed by
    kwargs 'interface_type'/'interface_name', set the qsfpp 'distance'
    leaf from kwargs, and pass the tree to the callback (kwargs
    'callback', defaulting to self._callback).
    """
    # Root of the RPC body; the generator's unused ET.Element("config") was dropped.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfpp = ET.SubElement(interface_identifier, "qsfpp")
    qsfpp = ET.SubElement(qsfpp, "qsfpp")  # nested <qsfpp>/<qsfpp> container
    distance = ET.SubElement(qsfpp, "distance")
    distance.text = kwargs.pop('distance')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_media_form_factor(self, **kwargs):
    """Auto Generated Code

    Build a get_media_detail request whose <interface> is keyed by
    kwargs 'interface_type'/'interface_name', set the qsfpp
    'media-form-factor' leaf from kwargs, and pass the tree to the
    callback (kwargs 'callback', defaulting to self._callback).
    """
    # Root of the RPC body; the generator's unused ET.Element("config") was dropped.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfpp = ET.SubElement(interface_identifier, "qsfpp")
    qsfpp = ET.SubElement(qsfpp, "qsfpp")  # nested <qsfpp>/<qsfpp> container
    media_form_factor = ET.SubElement(qsfpp, "media-form-factor")
    media_form_factor.text = kwargs.pop('media_form_factor')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_wavelength(self, **kwargs):
    """Auto Generated Code

    Build a get_media_detail request whose <interface> is keyed by
    kwargs 'interface_type'/'interface_name', set the qsfpp
    'wavelength' leaf from kwargs, and pass the tree to the callback
    (kwargs 'callback', defaulting to self._callback).
    """
    # Root of the RPC body; the generator's unused ET.Element("config") was dropped.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfpp = ET.SubElement(interface_identifier, "qsfpp")
    qsfpp = ET.SubElement(qsfpp, "qsfpp")  # nested <qsfpp>/<qsfpp> container
    wavelength = ET.SubElement(qsfpp, "wavelength")
    wavelength.text = kwargs.pop('wavelength')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_serial_no(self, **kwargs):
    """Auto Generated Code

    Build a get_media_detail request whose <interface> is keyed by
    kwargs 'interface_type'/'interface_name', set the qsfpp
    'serial-no' leaf from kwargs, and pass the tree to the callback
    (kwargs 'callback', defaulting to self._callback).
    """
    # Root of the RPC body; the generator's unused ET.Element("config") was dropped.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfpp = ET.SubElement(interface_identifier, "qsfpp")
    qsfpp = ET.SubElement(qsfpp, "qsfpp")  # nested <qsfpp>/<qsfpp> container
    serial_no = ET.SubElement(qsfpp, "serial-no")
    serial_no.text = kwargs.pop('serial_no')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_date_code(self, **kwargs):
    """Auto Generated Code

    Build a get_media_detail request whose <interface> is keyed by
    kwargs 'interface_type'/'interface_name', set the qsfpp
    'date-code' leaf from kwargs, and pass the tree to the callback
    (kwargs 'callback', defaulting to self._callback).
    """
    # Root of the RPC body; the generator's unused ET.Element("config") was dropped.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfpp = ET.SubElement(interface_identifier, "qsfpp")
    qsfpp = ET.SubElement(qsfpp, "qsfpp")  # nested <qsfpp>/<qsfpp> container
    date_code = ET.SubElement(qsfpp, "date-code")
    date_code.text = kwargs.pop('date_code')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_temperature(self, **kwargs):
    """Auto Generated Code

    Build a get_media_detail request whose <interface> is keyed by
    kwargs 'interface_type'/'interface_name', set the qsfpp
    'temperature' leaf from kwargs, and pass the tree to the callback
    (kwargs 'callback', defaulting to self._callback).
    """
    # Root of the RPC body; the generator's unused ET.Element("config") was dropped.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfpp = ET.SubElement(interface_identifier, "qsfpp")
    qsfpp = ET.SubElement(qsfpp, "qsfpp")  # nested <qsfpp>/<qsfpp> container
    temperature = ET.SubElement(qsfpp, "temperature")
    temperature.text = kwargs.pop('temperature')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_voltage(self, **kwargs):
    """Auto Generated Code

    Build a get_media_detail request whose <interface> is keyed by
    kwargs 'interface_type'/'interface_name', set the qsfpp 'voltage'
    leaf from kwargs, and pass the tree to the callback (kwargs
    'callback', defaulting to self._callback).
    """
    # Root of the RPC body; the generator's unused ET.Element("config") was dropped.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfpp = ET.SubElement(interface_identifier, "qsfpp")
    qsfpp = ET.SubElement(qsfpp, "qsfpp")  # nested <qsfpp>/<qsfpp> container
    voltage = ET.SubElement(qsfpp, "voltage")
    voltage.text = kwargs.pop('voltage')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_current(self, **kwargs):
    """Auto Generated Code

    Build a get_media_detail request whose <interface> is keyed by
    kwargs 'interface_type'/'interface_name', set the qsfpp 'current'
    leaf from kwargs, and pass the tree to the callback (kwargs
    'callback', defaulting to self._callback).
    """
    # Root of the RPC body; the generator's unused ET.Element("config") was dropped.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfpp = ET.SubElement(interface_identifier, "qsfpp")
    qsfpp = ET.SubElement(qsfpp, "qsfpp")  # nested <qsfpp>/<qsfpp> container
    current = ET.SubElement(qsfpp, "current")
    current.text = kwargs.pop('current')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_tx_power(self, **kwargs):
    """Auto Generated Code

    Build a get_media_detail request whose <interface> is keyed by
    kwargs 'interface_type'/'interface_name', set the qsfpp 'tx-power'
    leaf from kwargs, and pass the tree to the callback (kwargs
    'callback', defaulting to self._callback).
    """
    # Root of the RPC body; the generator's unused ET.Element("config") was dropped.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfpp = ET.SubElement(interface_identifier, "qsfpp")
    qsfpp = ET.SubElement(qsfpp, "qsfpp")  # nested <qsfpp>/<qsfpp> container
    tx_power = ET.SubElement(qsfpp, "tx-power")
    tx_power.text = kwargs.pop('tx_power')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_rx_power(self, **kwargs):
    """Auto Generated Code

    Build a get_media_detail request whose <interface> is keyed by
    kwargs 'interface_type'/'interface_name', set the qsfpp 'rx-power'
    leaf from kwargs, and pass the tree to the callback (kwargs
    'callback', defaulting to self._callback).
    """
    # Root of the RPC body; the generator's unused ET.Element("config") was dropped.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfpp = ET.SubElement(interface_identifier, "qsfpp")
    qsfpp = ET.SubElement(qsfpp, "qsfpp")  # nested <qsfpp>/<qsfpp> container
    rx_power = ET.SubElement(qsfpp, "rx-power")
    rx_power.text = kwargs.pop('rx_power')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_speed(self, **kwargs):
    """Auto Generated Code

    Build a get_media_detail request whose <interface> is keyed by
    kwargs 'interface_type'/'interface_name', set the cfp 'speed'
    leaf from kwargs, and pass the tree to the callback (kwargs
    'callback', defaulting to self._callback).
    """
    # Root of the RPC body; the generator's unused ET.Element("config") was dropped.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp = ET.SubElement(interface_identifier, "cfp")
    cfp = ET.SubElement(cfp, "cfp")  # nested <cfp>/<cfp> container
    speed = ET.SubElement(cfp, "speed")
    speed.text = kwargs.pop('speed')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_connector(self, **kwargs):
    """Auto Generated Code

    Build a get_media_detail request whose <interface> is keyed by
    kwargs 'interface_type'/'interface_name', set the cfp 'connector'
    leaf from kwargs, and pass the tree to the callback (kwargs
    'callback', defaulting to self._callback).
    """
    # Root of the RPC body; the generator's unused ET.Element("config") was dropped.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp = ET.SubElement(interface_identifier, "cfp")
    cfp = ET.SubElement(cfp, "cfp")  # nested <cfp>/<cfp> container
    connector = ET.SubElement(cfp, "connector")
    connector.text = kwargs.pop('connector')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_encoding(self, **kwargs):
    """Auto Generated Code

    Build a get_media_detail request whose <interface> is keyed by
    kwargs 'interface_type'/'interface_name', set the cfp 'encoding'
    leaf from kwargs, and pass the tree to the callback (kwargs
    'callback', defaulting to self._callback).
    """
    # Root of the RPC body; the generator's unused ET.Element("config") was dropped.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp = ET.SubElement(interface_identifier, "cfp")
    cfp = ET.SubElement(cfp, "cfp")  # nested <cfp>/<cfp> container
    encoding = ET.SubElement(cfp, "encoding")
    encoding.text = kwargs.pop('encoding')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_vendor_name(self, **kwargs):
    """Auto Generated Code

    Build a get_media_detail request whose <interface> is keyed by
    kwargs 'interface_type'/'interface_name', set the cfp
    'vendor-name' leaf from kwargs, and pass the tree to the callback
    (kwargs 'callback', defaulting to self._callback).
    """
    # Root of the RPC body; the generator's unused ET.Element("config") was dropped.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp = ET.SubElement(interface_identifier, "cfp")
    cfp = ET.SubElement(cfp, "cfp")  # nested <cfp>/<cfp> container
    vendor_name = ET.SubElement(cfp, "vendor-name")
    vendor_name.text = kwargs.pop('vendor_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_vendor_oui(self, **kwargs):
    """Auto Generated Code

    Build a get_media_detail request whose <interface> is keyed by
    kwargs 'interface_type'/'interface_name', set the cfp 'vendor-oui'
    leaf from kwargs, and pass the tree to the callback (kwargs
    'callback', defaulting to self._callback).
    """
    # Root of the RPC body; the generator's unused ET.Element("config") was dropped.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp = ET.SubElement(interface_identifier, "cfp")
    cfp = ET.SubElement(cfp, "cfp")  # nested <cfp>/<cfp> container
    vendor_oui = ET.SubElement(cfp, "vendor-oui")
    vendor_oui.text = kwargs.pop('vendor_oui')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_vendor_pn(self, **kwargs):
    """Auto Generated Code

    Build a get_media_detail request whose <interface> is keyed by
    kwargs 'interface_type'/'interface_name', set the cfp 'vendor-pn'
    leaf from kwargs, and pass the tree to the callback (kwargs
    'callback', defaulting to self._callback).
    """
    # Root of the RPC body; the generator's unused ET.Element("config") was dropped.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp = ET.SubElement(interface_identifier, "cfp")
    cfp = ET.SubElement(cfp, "cfp")  # nested <cfp>/<cfp> container
    vendor_pn = ET.SubElement(cfp, "vendor-pn")
    vendor_pn.text = kwargs.pop('vendor_pn')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_vendor_rev(self, **kwargs):
    """Auto Generated Code

    Build a get_media_detail request whose <interface> is keyed by
    kwargs 'interface_type'/'interface_name', set the cfp 'vendor-rev'
    leaf from kwargs, and pass the tree to the callback (kwargs
    'callback', defaulting to self._callback).
    """
    # Root of the RPC body; the generator's unused ET.Element("config") was dropped.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp = ET.SubElement(interface_identifier, "cfp")
    cfp = ET.SubElement(cfp, "cfp")  # nested <cfp>/<cfp> container
    vendor_rev = ET.SubElement(cfp, "vendor-rev")
    vendor_rev.text = kwargs.pop('vendor_rev')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_distance(self, **kwargs):
    """Build a get_media_detail RPC selecting output/interface/interface-identifier/cfp/cfp/distance.

    Required kwargs: interface_type, interface_name (interface keys), distance.
    Optional kwarg: callback — invoked with the built element; defaults to self._callback.
    Returns whatever the callback returns.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp = ET.SubElement(interface_identifier, "cfp")
    cfp = ET.SubElement(cfp, "cfp")  # nested container shares the tag name
    distance = ET.SubElement(cfp, "distance")
    distance.text = kwargs.pop('distance')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_media_form_factor(self, **kwargs):
    """Build a get_media_detail RPC selecting output/interface/interface-identifier/cfp/cfp/media-form-factor.

    Required kwargs: interface_type, interface_name (interface keys), media_form_factor.
    Optional kwarg: callback — invoked with the built element; defaults to self._callback.
    Returns whatever the callback returns.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp = ET.SubElement(interface_identifier, "cfp")
    cfp = ET.SubElement(cfp, "cfp")  # nested container shares the tag name
    media_form_factor = ET.SubElement(cfp, "media-form-factor")
    media_form_factor.text = kwargs.pop('media_form_factor')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_wavelength(self, **kwargs):
    """Build a get_media_detail RPC selecting output/interface/interface-identifier/cfp/cfp/wavelength.

    Required kwargs: interface_type, interface_name (interface keys), wavelength.
    Optional kwarg: callback — invoked with the built element; defaults to self._callback.
    Returns whatever the callback returns.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp = ET.SubElement(interface_identifier, "cfp")
    cfp = ET.SubElement(cfp, "cfp")  # nested container shares the tag name
    wavelength = ET.SubElement(cfp, "wavelength")
    wavelength.text = kwargs.pop('wavelength')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_serial_no(self, **kwargs):
    """Build a get_media_detail RPC selecting output/interface/interface-identifier/cfp/cfp/serial-no.

    Required kwargs: interface_type, interface_name (interface keys), serial_no.
    Optional kwarg: callback — invoked with the built element; defaults to self._callback.
    Returns whatever the callback returns.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp = ET.SubElement(interface_identifier, "cfp")
    cfp = ET.SubElement(cfp, "cfp")  # nested container shares the tag name
    serial_no = ET.SubElement(cfp, "serial-no")
    serial_no.text = kwargs.pop('serial_no')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_date_code(self, **kwargs):
    """Build a get_media_detail RPC selecting output/interface/interface-identifier/cfp/cfp/date-code.

    Required kwargs: interface_type, interface_name (interface keys), date_code.
    Optional kwarg: callback — invoked with the built element; defaults to self._callback.
    Returns whatever the callback returns.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp = ET.SubElement(interface_identifier, "cfp")
    cfp = ET.SubElement(cfp, "cfp")  # nested container shares the tag name
    date_code = ET.SubElement(cfp, "date-code")
    date_code.text = kwargs.pop('date_code')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_temperature(self, **kwargs):
    """Build a get_media_detail RPC selecting output/interface/interface-identifier/cfp/cfp/temperature.

    Required kwargs: interface_type, interface_name (interface keys), temperature.
    Optional kwarg: callback — invoked with the built element; defaults to self._callback.
    Returns whatever the callback returns.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp = ET.SubElement(interface_identifier, "cfp")
    cfp = ET.SubElement(cfp, "cfp")  # nested container shares the tag name
    temperature = ET.SubElement(cfp, "temperature")
    temperature.text = kwargs.pop('temperature')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_voltage(self, **kwargs):
    """Build a get_media_detail RPC selecting output/interface/interface-identifier/cfp/cfp/voltage.

    Required kwargs: interface_type, interface_name (interface keys), voltage.
    Optional kwarg: callback — invoked with the built element; defaults to self._callback.
    Returns whatever the callback returns.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp = ET.SubElement(interface_identifier, "cfp")
    cfp = ET.SubElement(cfp, "cfp")  # nested container shares the tag name
    voltage = ET.SubElement(cfp, "voltage")
    voltage.text = kwargs.pop('voltage')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_current(self, **kwargs):
    """Build a get_media_detail RPC selecting output/interface/interface-identifier/cfp/cfp/current.

    Required kwargs: interface_type, interface_name (interface keys), current.
    Optional kwarg: callback — invoked with the built element; defaults to self._callback.
    Returns whatever the callback returns.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp = ET.SubElement(interface_identifier, "cfp")
    cfp = ET.SubElement(cfp, "cfp")  # nested container shares the tag name
    current = ET.SubElement(cfp, "current")
    current.text = kwargs.pop('current')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_tx_power(self, **kwargs):
    """Build a get_media_detail RPC selecting output/interface/interface-identifier/cfp/cfp/tx-power.

    Required kwargs: interface_type, interface_name (interface keys), tx_power.
    Optional kwarg: callback — invoked with the built element; defaults to self._callback.
    Returns whatever the callback returns.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp = ET.SubElement(interface_identifier, "cfp")
    cfp = ET.SubElement(cfp, "cfp")  # nested container shares the tag name
    tx_power = ET.SubElement(cfp, "tx-power")
    tx_power.text = kwargs.pop('tx_power')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_rx_power(self, **kwargs):
    """Build a get_media_detail RPC selecting output/interface/interface-identifier/cfp/cfp/rx-power.

    Required kwargs: interface_type, interface_name (interface keys), rx_power.
    Optional kwarg: callback — invoked with the built element; defaults to self._callback.
    Returns whatever the callback returns.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp = ET.SubElement(interface_identifier, "cfp")
    cfp = ET.SubElement(cfp, "cfp")  # nested container shares the tag name
    rx_power = ET.SubElement(cfp, "rx-power")
    rx_power.text = kwargs.pop('rx_power')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_speed(self, **kwargs):
    """Build a get_media_detail RPC selecting output/interface/interface-identifier/cfp2/cfp2/speed.

    Required kwargs: interface_type, interface_name (interface keys), speed.
    Optional kwarg: callback — invoked with the built element; defaults to self._callback.
    Returns whatever the callback returns.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")  # nested container shares the tag name
    speed = ET.SubElement(cfp2, "speed")
    speed.text = kwargs.pop('speed')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_connector(self, **kwargs):
    """Build a get_media_detail RPC selecting output/interface/interface-identifier/cfp2/cfp2/connector.

    Required kwargs: interface_type, interface_name (interface keys), connector.
    Optional kwarg: callback — invoked with the built element; defaults to self._callback.
    Returns whatever the callback returns.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")  # nested container shares the tag name
    connector = ET.SubElement(cfp2, "connector")
    connector.text = kwargs.pop('connector')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_encoding(self, **kwargs):
    """Build a get_media_detail RPC selecting output/interface/interface-identifier/cfp2/cfp2/encoding.

    Required kwargs: interface_type, interface_name (interface keys), encoding.
    Optional kwarg: callback — invoked with the built element; defaults to self._callback.
    Returns whatever the callback returns.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")  # nested container shares the tag name
    encoding = ET.SubElement(cfp2, "encoding")
    encoding.text = kwargs.pop('encoding')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_vendor_name(self, **kwargs):
    """Build a get_media_detail RPC selecting output/interface/interface-identifier/cfp2/cfp2/vendor-name.

    Required kwargs: interface_type, interface_name (interface keys), vendor_name.
    Optional kwarg: callback — invoked with the built element; defaults to self._callback.
    Returns whatever the callback returns.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")  # nested container shares the tag name
    vendor_name = ET.SubElement(cfp2, "vendor-name")
    vendor_name.text = kwargs.pop('vendor_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_vendor_oui(self, **kwargs):
    """Build a get_media_detail RPC selecting output/interface/interface-identifier/cfp2/cfp2/vendor-oui.

    Required kwargs: interface_type, interface_name (interface keys), vendor_oui.
    Optional kwarg: callback — invoked with the built element; defaults to self._callback.
    Returns whatever the callback returns.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")  # nested container shares the tag name
    vendor_oui = ET.SubElement(cfp2, "vendor-oui")
    vendor_oui.text = kwargs.pop('vendor_oui')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_vendor_pn(self, **kwargs):
    """Build a get_media_detail RPC selecting output/interface/interface-identifier/cfp2/cfp2/vendor-pn.

    Required kwargs: interface_type, interface_name (interface keys), vendor_pn.
    Optional kwarg: callback — invoked with the built element; defaults to self._callback.
    Returns whatever the callback returns.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")  # nested container shares the tag name
    vendor_pn = ET.SubElement(cfp2, "vendor-pn")
    vendor_pn.text = kwargs.pop('vendor_pn')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_vendor_rev(self, **kwargs):
    """Build a get_media_detail RPC selecting output/interface/interface-identifier/cfp2/cfp2/vendor-rev.

    Required kwargs: interface_type, interface_name (interface keys), vendor_rev.
    Optional kwarg: callback — invoked with the built element; defaults to self._callback.
    Returns whatever the callback returns.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")  # nested container shares the tag name
    vendor_rev = ET.SubElement(cfp2, "vendor-rev")
    vendor_rev.text = kwargs.pop('vendor_rev')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_distance(self, **kwargs):
    """Build a get_media_detail RPC selecting output/interface/interface-identifier/cfp2/cfp2/distance.

    Required kwargs: interface_type, interface_name (interface keys), distance.
    Optional kwarg: callback — invoked with the built element; defaults to self._callback.
    Returns whatever the callback returns.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")  # nested container shares the tag name
    distance = ET.SubElement(cfp2, "distance")
    distance.text = kwargs.pop('distance')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_media_form_factor(self, **kwargs):
    """Build a get_media_detail RPC selecting output/interface/interface-identifier/cfp2/cfp2/media-form-factor.

    Required kwargs: interface_type, interface_name (interface keys), media_form_factor.
    Optional kwarg: callback — invoked with the built element; defaults to self._callback.
    Returns whatever the callback returns.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")  # nested container shares the tag name
    media_form_factor = ET.SubElement(cfp2, "media-form-factor")
    media_form_factor.text = kwargs.pop('media_form_factor')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_wavelength(self, **kwargs):
    """Build a get_media_detail RPC selecting output/interface/interface-identifier/cfp2/cfp2/wavelength.

    Required kwargs: interface_type, interface_name (interface keys), wavelength.
    Optional kwarg: callback — invoked with the built element; defaults to self._callback.
    Returns whatever the callback returns.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")  # nested container shares the tag name
    wavelength = ET.SubElement(cfp2, "wavelength")
    wavelength.text = kwargs.pop('wavelength')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_serial_no(self, **kwargs):
    """Build a get_media_detail RPC selecting output/interface/interface-identifier/cfp2/cfp2/serial-no.

    Required kwargs: interface_type, interface_name (interface keys), serial_no.
    Optional kwarg: callback — invoked with the built element; defaults to self._callback.
    Returns whatever the callback returns.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")  # nested container shares the tag name
    serial_no = ET.SubElement(cfp2, "serial-no")
    serial_no.text = kwargs.pop('serial_no')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_date_code(self, **kwargs):
    """Build a get_media_detail RPC selecting output/interface/interface-identifier/cfp2/cfp2/date-code.

    Required kwargs: interface_type, interface_name (interface keys), date_code.
    Optional kwarg: callback — invoked with the built element; defaults to self._callback.
    Returns whatever the callback returns.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")  # nested container shares the tag name
    date_code = ET.SubElement(cfp2, "date-code")
    date_code.text = kwargs.pop('date_code')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_temperature(self, **kwargs):
    """Build a get_media_detail RPC selecting output/interface/interface-identifier/cfp2/cfp2/temperature.

    Required kwargs: interface_type, interface_name (interface keys), temperature.
    Optional kwarg: callback — invoked with the built element; defaults to self._callback.
    Returns whatever the callback returns.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")  # nested container shares the tag name
    temperature = ET.SubElement(cfp2, "temperature")
    temperature.text = kwargs.pop('temperature')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_voltage(self, **kwargs):
    """Build a get_media_detail RPC selecting output/interface/interface-identifier/cfp2/cfp2/voltage.

    Required kwargs: interface_type, interface_name (interface keys), voltage.
    Optional kwarg: callback — invoked with the built element; defaults to self._callback.
    Returns whatever the callback returns.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")  # nested container shares the tag name
    voltage = ET.SubElement(cfp2, "voltage")
    voltage.text = kwargs.pop('voltage')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_current(self, **kwargs):
    """Build a get_media_detail RPC selecting output/interface/interface-identifier/cfp2/cfp2/current.

    Required kwargs: interface_type, interface_name (interface keys), current.
    Optional kwarg: callback — invoked with the built element; defaults to self._callback.
    Returns whatever the callback returns.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")  # nested container shares the tag name
    current = ET.SubElement(cfp2, "current")
    current.text = kwargs.pop('current')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_tx_power(self, **kwargs):
    """Build a get_media_detail RPC selecting output/interface/interface-identifier/cfp2/cfp2/tx-power.

    Required kwargs: interface_type, interface_name (interface keys), tx_power.
    Optional kwarg: callback — invoked with the built element; defaults to self._callback.
    Returns whatever the callback returns.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")  # nested container shares the tag name
    tx_power = ET.SubElement(cfp2, "tx-power")
    tx_power.text = kwargs.pop('tx_power')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_rx_power(self, **kwargs):
    """Build a get_media_detail RPC selecting output/interface/interface-identifier/cfp2/cfp2/rx-power.

    Required kwargs: interface_type, interface_name (interface keys), rx_power.
    Optional kwarg: callback — invoked with the built element; defaults to self._callback.
    Returns whatever the callback returns.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")  # nested container shares the tag name
    rx_power = ET.SubElement(cfp2, "rx-power")
    rx_power.text = kwargs.pop('rx_power')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_vlan_brief_input_request_type_get_request_vlan_id(self, **kwargs):
    """Build a get_vlan_brief RPC setting input/request-type/get-request/vlan-id.

    Required kwarg: vlan_id.
    Optional kwarg: callback — invoked with the built element; defaults to self._callback.
    Returns whatever the callback returns.
    """
    # Dead ET.Element("config") from the generator removed; local renamed
    # from `input` to avoid shadowing the builtin.
    config = ET.Element("get_vlan_brief")
    input_el = ET.SubElement(config, "input")
    request_type = ET.SubElement(input_el, "request-type")
    get_request = ET.SubElement(request_type, "get-request")
    vlan_id = ET.SubElement(get_request, "vlan-id")
    vlan_id.text = kwargs.pop('vlan_id')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_vlan_brief_input_request_type_get_next_request_last_rcvd_vlan_id(self, **kwargs):
    """Build a get_vlan_brief RPC setting input/request-type/get-next-request/last-rcvd-vlan-id.

    Required kwarg: last_rcvd_vlan_id.
    Optional kwarg: callback — invoked with the built element; defaults to self._callback.
    Returns whatever the callback returns.
    """
    # Dead ET.Element("config") from the generator removed; local renamed
    # from `input` to avoid shadowing the builtin.
    config = ET.Element("get_vlan_brief")
    input_el = ET.SubElement(config, "input")
    request_type = ET.SubElement(input_el, "request-type")
    get_next_request = ET.SubElement(request_type, "get-next-request")
    last_rcvd_vlan_id = ET.SubElement(get_next_request, "last-rcvd-vlan-id")
    last_rcvd_vlan_id.text = kwargs.pop('last_rcvd_vlan_id')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_vlan_brief_output_configured_vlans_count(self, **kwargs):
    """Build a get_vlan_brief RPC setting output/configured-vlans-count.

    Required kwarg: configured_vlans_count.
    Optional kwarg: callback — invoked with the built element; defaults to self._callback.
    Returns whatever the callback returns.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_vlan_brief")
    output = ET.SubElement(config, "output")
    configured_vlans_count = ET.SubElement(output, "configured-vlans-count")
    configured_vlans_count.text = kwargs.pop('configured_vlans_count')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_vlan_brief_output_provisioned_vlans_count(self, **kwargs):
    """Build a get_vlan_brief RPC setting output/provisioned-vlans-count.

    Required kwarg: provisioned_vlans_count.
    Optional kwarg: callback — invoked with the built element; defaults to self._callback.
    Returns whatever the callback returns.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_vlan_brief")
    output = ET.SubElement(config, "output")
    provisioned_vlans_count = ET.SubElement(output, "provisioned-vlans-count")
    provisioned_vlans_count.text = kwargs.pop('provisioned_vlans_count')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_vlan_brief_output_unprovisioned_vlans_count(self, **kwargs):
    """Build a get_vlan_brief RPC setting output/unprovisioned-vlans-count.

    Required kwarg: unprovisioned_vlans_count.
    Optional kwarg: callback — invoked with the built element; defaults to self._callback.
    Returns whatever the callback returns.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_vlan_brief")
    output = ET.SubElement(config, "output")
    unprovisioned_vlans_count = ET.SubElement(output, "unprovisioned-vlans-count")
    unprovisioned_vlans_count.text = kwargs.pop('unprovisioned_vlans_count')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_vlan_brief_output_vlan_vlan_id(self, **kwargs):
    """Build a get_vlan_brief RPC setting output/vlan/vlan-id.

    Required kwarg: vlan_id.
    Optional kwarg: callback — invoked with the built element; defaults to self._callback.
    Returns whatever the callback returns.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_vlan_brief")
    output = ET.SubElement(config, "output")
    vlan = ET.SubElement(output, "vlan")
    vlan_id = ET.SubElement(vlan, "vlan-id")
    vlan_id.text = kwargs.pop('vlan_id')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_vlan_brief_output_vlan_vlan_type(self, **kwargs):
    """Build a get_vlan_brief RPC setting output/vlan/vlan-type.

    Required kwargs: vlan_id (list key), vlan_type.
    Optional kwarg: callback — invoked with the built element; defaults to self._callback.
    Returns whatever the callback returns.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_vlan_brief")
    output = ET.SubElement(config, "output")
    vlan = ET.SubElement(output, "vlan")
    vlan_id_key = ET.SubElement(vlan, "vlan-id")
    vlan_id_key.text = kwargs.pop('vlan_id')
    vlan_type = ET.SubElement(vlan, "vlan-type")
    vlan_type.text = kwargs.pop('vlan_type')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_vlan_brief_output_vlan_vlan_name(self, **kwargs):
    """Build a get_vlan_brief RPC setting output/vlan/vlan-name.

    Required kwargs: vlan_id (list key), vlan_name.
    Optional kwarg: callback — invoked with the built element; defaults to self._callback.
    Returns whatever the callback returns.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_vlan_brief")
    output = ET.SubElement(config, "output")
    vlan = ET.SubElement(output, "vlan")
    vlan_id_key = ET.SubElement(vlan, "vlan-id")
    vlan_id_key.text = kwargs.pop('vlan_id')
    vlan_name = ET.SubElement(vlan, "vlan-name")
    vlan_name.text = kwargs.pop('vlan_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_vlan_brief_output_vlan_vlan_state(self, **kwargs):
    """Build a get_vlan_brief RPC setting output/vlan/vlan-state.

    Required kwargs: vlan_id (list key), vlan_state.
    Optional kwarg: callback — invoked with the built element; defaults to self._callback.
    Returns whatever the callback returns.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_vlan_brief")
    output = ET.SubElement(config, "output")
    vlan = ET.SubElement(output, "vlan")
    vlan_id_key = ET.SubElement(vlan, "vlan-id")
    vlan_id_key.text = kwargs.pop('vlan_id')
    vlan_state = ET.SubElement(vlan, "vlan-state")
    vlan_state.text = kwargs.pop('vlan_state')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_vlan_brief_output_vlan_interface_interface_type(self, **kwargs):
    """Build a get_vlan_brief RPC setting output/vlan/interface/interface-type.

    Required kwargs: vlan_id, interface_name (list keys), interface_type.
    Optional kwarg: callback — invoked with the built element; defaults to self._callback.
    Returns whatever the callback returns.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_vlan_brief")
    output = ET.SubElement(config, "output")
    vlan = ET.SubElement(output, "vlan")
    vlan_id_key = ET.SubElement(vlan, "vlan-id")
    vlan_id_key.text = kwargs.pop('vlan_id')
    interface = ET.SubElement(vlan, "interface")
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_type = ET.SubElement(interface, "interface-type")
    interface_type.text = kwargs.pop('interface_type')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_vlan_brief_output_vlan_interface_interface_name(self, **kwargs):
    """Build a get_vlan_brief RPC setting output/vlan/interface/interface-name.

    Required kwargs: vlan_id, interface_type (list keys), interface_name.
    Optional kwarg: callback — invoked with the built element; defaults to self._callback.
    Returns whatever the callback returns.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_vlan_brief")
    output = ET.SubElement(config, "output")
    vlan = ET.SubElement(output, "vlan")
    vlan_id_key = ET.SubElement(vlan, "vlan-id")
    vlan_id_key.text = kwargs.pop('vlan_id')
    interface = ET.SubElement(vlan, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name = ET.SubElement(interface, "interface-name")
    interface_name.text = kwargs.pop('interface_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_vlan_brief_output_vlan_interface_tag(self, **kwargs):
    """Build a get_vlan_brief RPC setting output/vlan/interface/tag.

    Required kwargs: vlan_id, interface_type, interface_name (list keys), tag.
    Optional kwarg: callback — invoked with the built element; defaults to self._callback.
    Returns whatever the callback returns.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_vlan_brief")
    output = ET.SubElement(config, "output")
    vlan = ET.SubElement(output, "vlan")
    vlan_id_key = ET.SubElement(vlan, "vlan-id")
    vlan_id_key.text = kwargs.pop('vlan_id')
    interface = ET.SubElement(vlan, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    tag = ET.SubElement(interface, "tag")
    tag.text = kwargs.pop('tag')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_vlan_brief_output_vlan_interface_classification_classification_type(self, **kwargs):
    """Build a get_vlan_brief RPC setting output/vlan/interface/classification/classification-type.

    Required kwargs: vlan_id, interface_type, interface_name, classification_value
    (list keys), classification_type.
    Optional kwarg: callback — invoked with the built element; defaults to self._callback.
    Returns whatever the callback returns.
    """
    # Dead ET.Element("config") from the generator removed.
    config = ET.Element("get_vlan_brief")
    output = ET.SubElement(config, "output")
    vlan = ET.SubElement(output, "vlan")
    vlan_id_key = ET.SubElement(vlan, "vlan-id")
    vlan_id_key.text = kwargs.pop('vlan_id')
    interface = ET.SubElement(vlan, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    classification = ET.SubElement(interface, "classification")
    classification_value_key = ET.SubElement(classification, "classification-value")
    classification_value_key.text = kwargs.pop('classification_value')
    classification_type = ET.SubElement(classification, "classification-type")
    classification_type.text = kwargs.pop('classification_type')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_vlan_brief_output_vlan_interface_classification_classification_value(self, **kwargs):
    """Build <get_vlan_brief>/<output>/<vlan>/<interface>/<classification>/<classification-value> and return callback(tree).

    Required kwargs: vlan_id, interface_type, interface_name,
    classification_type, classification_value; 'callback' overrides self._callback.
    """
    # Dead ET.Element("config") from the generator removed; `config` aliases root.
    get_vlan_brief = ET.Element("get_vlan_brief")
    config = get_vlan_brief
    output = ET.SubElement(get_vlan_brief, "output")
    vlan = ET.SubElement(output, "vlan")
    vlan_id_key = ET.SubElement(vlan, "vlan-id")
    vlan_id_key.text = kwargs.pop('vlan_id')
    interface = ET.SubElement(vlan, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    classification = ET.SubElement(interface, "classification")
    classification_type_key = ET.SubElement(classification, "classification-type")
    classification_type_key.text = kwargs.pop('classification_type')
    classification_value = ET.SubElement(classification, "classification-value")
    classification_value.text = kwargs.pop('classification_value')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_vlan_brief_output_last_vlan_id(self, **kwargs):
    """Build <get_vlan_brief>/<output>/<last-vlan-id> and return callback(tree).

    Required kwargs: last_vlan_id; 'callback' overrides self._callback.
    """
    # Dead ET.Element("config") from the generator removed; `config` aliases root.
    get_vlan_brief = ET.Element("get_vlan_brief")
    config = get_vlan_brief
    output = ET.SubElement(get_vlan_brief, "output")
    last_vlan_id = ET.SubElement(output, "last-vlan-id")
    last_vlan_id.text = kwargs.pop('last_vlan_id')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_vlan_brief_output_has_more(self, **kwargs):
    """Build <get_vlan_brief>/<output>/<has-more> and return callback(tree).

    Required kwargs: has_more; 'callback' overrides self._callback.
    """
    # Dead ET.Element("config") from the generator removed; `config` aliases root.
    get_vlan_brief = ET.Element("get_vlan_brief")
    config = get_vlan_brief
    output = ET.SubElement(get_vlan_brief, "output")
    has_more = ET.SubElement(output, "has-more")
    has_more.text = kwargs.pop('has_more')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_switchport_output_switchport_interface_type(self, **kwargs):
    """Build <get_interface_switchport>/<output>/<switchport>/<interface-type> and return callback(tree).

    Required kwargs: interface_name, interface_type; 'callback' overrides self._callback.
    """
    # Dead ET.Element("config") from the generator removed; `config` aliases root.
    get_interface_switchport = ET.Element("get_interface_switchport")
    config = get_interface_switchport
    output = ET.SubElement(get_interface_switchport, "output")
    switchport = ET.SubElement(output, "switchport")
    interface_name_key = ET.SubElement(switchport, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_type = ET.SubElement(switchport, "interface-type")
    interface_type.text = kwargs.pop('interface_type')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_switchport_output_switchport_interface_name(self, **kwargs):
    """Build <get_interface_switchport>/<output>/<switchport>/<interface-name> and return callback(tree).

    Required kwargs: interface_type, interface_name; 'callback' overrides self._callback.
    """
    # Dead ET.Element("config") from the generator removed; `config` aliases root.
    get_interface_switchport = ET.Element("get_interface_switchport")
    config = get_interface_switchport
    output = ET.SubElement(get_interface_switchport, "output")
    switchport = ET.SubElement(output, "switchport")
    interface_type_key = ET.SubElement(switchport, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name = ET.SubElement(switchport, "interface-name")
    interface_name.text = kwargs.pop('interface_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_switchport_output_switchport_mode(self, **kwargs):
    """Build <get_interface_switchport>/<output>/<switchport>/<mode> and return callback(tree).

    Required kwargs: interface_type, interface_name, mode; 'callback' overrides self._callback.
    """
    # Dead ET.Element("config") from the generator removed; `config` aliases root.
    get_interface_switchport = ET.Element("get_interface_switchport")
    config = get_interface_switchport
    output = ET.SubElement(get_interface_switchport, "output")
    switchport = ET.SubElement(output, "switchport")
    interface_type_key = ET.SubElement(switchport, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(switchport, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    mode = ET.SubElement(switchport, "mode")
    mode.text = kwargs.pop('mode')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_switchport_output_switchport_fcoe_port_enabled(self, **kwargs):
    """Build <get_interface_switchport>/<output>/<switchport>/<fcoe-port-enabled> and return callback(tree).

    Required kwargs: interface_type, interface_name, fcoe_port_enabled;
    'callback' overrides self._callback.
    """
    # Dead ET.Element("config") from the generator removed; `config` aliases root.
    get_interface_switchport = ET.Element("get_interface_switchport")
    config = get_interface_switchport
    output = ET.SubElement(get_interface_switchport, "output")
    switchport = ET.SubElement(output, "switchport")
    interface_type_key = ET.SubElement(switchport, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(switchport, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    fcoe_port_enabled = ET.SubElement(switchport, "fcoe-port-enabled")
    fcoe_port_enabled.text = kwargs.pop('fcoe_port_enabled')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_switchport_output_switchport_ingress_filter_enabled(self, **kwargs):
    """Build <get_interface_switchport>/<output>/<switchport>/<ingress-filter-enabled> and return callback(tree).

    Required kwargs: interface_type, interface_name, ingress_filter_enabled;
    'callback' overrides self._callback.
    """
    # Dead ET.Element("config") from the generator removed; `config` aliases root.
    get_interface_switchport = ET.Element("get_interface_switchport")
    config = get_interface_switchport
    output = ET.SubElement(get_interface_switchport, "output")
    switchport = ET.SubElement(output, "switchport")
    interface_type_key = ET.SubElement(switchport, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(switchport, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    ingress_filter_enabled = ET.SubElement(switchport, "ingress-filter-enabled")
    ingress_filter_enabled.text = kwargs.pop('ingress_filter_enabled')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_switchport_output_switchport_acceptable_frame_type(self, **kwargs):
    """Build <get_interface_switchport>/<output>/<switchport>/<acceptable-frame-type> and return callback(tree).

    Required kwargs: interface_type, interface_name, acceptable_frame_type;
    'callback' overrides self._callback.
    """
    # Dead ET.Element("config") from the generator removed; `config` aliases root.
    get_interface_switchport = ET.Element("get_interface_switchport")
    config = get_interface_switchport
    output = ET.SubElement(get_interface_switchport, "output")
    switchport = ET.SubElement(output, "switchport")
    interface_type_key = ET.SubElement(switchport, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(switchport, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    acceptable_frame_type = ET.SubElement(switchport, "acceptable-frame-type")
    acceptable_frame_type.text = kwargs.pop('acceptable_frame_type')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_switchport_output_switchport_default_vlan(self, **kwargs):
    """Build <get_interface_switchport>/<output>/<switchport>/<default-vlan> and return callback(tree).

    Required kwargs: interface_type, interface_name, default_vlan;
    'callback' overrides self._callback.
    """
    # Dead ET.Element("config") from the generator removed; `config` aliases root.
    get_interface_switchport = ET.Element("get_interface_switchport")
    config = get_interface_switchport
    output = ET.SubElement(get_interface_switchport, "output")
    switchport = ET.SubElement(output, "switchport")
    interface_type_key = ET.SubElement(switchport, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(switchport, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    default_vlan = ET.SubElement(switchport, "default-vlan")
    default_vlan.text = kwargs.pop('default_vlan')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_ip_interface_input_request_type_get_request_interface_type(self, **kwargs):
    """Build <get_ip_interface>/<input>/<request-type>/<get-request>/<interface-type> and return callback(tree).

    Required kwargs: interface_type; 'callback' overrides self._callback.
    """
    # Dead ET.Element("config") removed; local `input` renamed to avoid
    # shadowing the builtin. `config` aliases the root element.
    get_ip_interface = ET.Element("get_ip_interface")
    config = get_ip_interface
    input_elem = ET.SubElement(get_ip_interface, "input")
    request_type = ET.SubElement(input_elem, "request-type")
    get_request = ET.SubElement(request_type, "get-request")
    interface_type = ET.SubElement(get_request, "interface-type")
    interface_type.text = kwargs.pop('interface_type')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_ip_interface_input_request_type_get_request_interface_name(self, **kwargs):
    """Build <get_ip_interface>/<input>/<request-type>/<get-request>/<interface-name> and return callback(tree).

    Required kwargs: interface_name; 'callback' overrides self._callback.
    """
    # Dead ET.Element("config") removed; local `input` renamed to avoid
    # shadowing the builtin. `config` aliases the root element.
    get_ip_interface = ET.Element("get_ip_interface")
    config = get_ip_interface
    input_elem = ET.SubElement(get_ip_interface, "input")
    request_type = ET.SubElement(input_elem, "request-type")
    get_request = ET.SubElement(request_type, "get-request")
    interface_name = ET.SubElement(get_request, "interface-name")
    interface_name.text = kwargs.pop('interface_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_ip_interface_input_request_type_get_request_rbridge_id(self, **kwargs):
    """Build <get_ip_interface>/<input>/<request-type>/<get-request>/<rbridge-id> and return callback(tree).

    Required kwargs: rbridge_id; 'callback' overrides self._callback.
    """
    # Dead ET.Element("config") removed; local `input` renamed to avoid
    # shadowing the builtin. `config` aliases the root element.
    get_ip_interface = ET.Element("get_ip_interface")
    config = get_ip_interface
    input_elem = ET.SubElement(get_ip_interface, "input")
    request_type = ET.SubElement(input_elem, "request-type")
    get_request = ET.SubElement(request_type, "get-request")
    rbridge_id = ET.SubElement(get_request, "rbridge-id")
    rbridge_id.text = kwargs.pop('rbridge_id')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_ip_interface_output_interface_interface_type(self, **kwargs):
    """Build <get_ip_interface>/<output>/<interface>/<interface-type> and return callback(tree).

    Required kwargs: interface_name, interface_type; 'callback' overrides self._callback.
    """
    # Dead ET.Element("config") from the generator removed; `config` aliases root.
    get_ip_interface = ET.Element("get_ip_interface")
    config = get_ip_interface
    output = ET.SubElement(get_ip_interface, "output")
    interface = ET.SubElement(output, "interface")
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_type = ET.SubElement(interface, "interface-type")
    interface_type.text = kwargs.pop('interface_type')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_ip_interface_output_interface_interface_name(self, **kwargs):
    """Build <get_ip_interface>/<output>/<interface>/<interface-name> and return callback(tree).

    Required kwargs: interface_type, interface_name; 'callback' overrides self._callback.
    """
    # Dead ET.Element("config") from the generator removed; `config` aliases root.
    get_ip_interface = ET.Element("get_ip_interface")
    config = get_ip_interface
    output = ET.SubElement(get_ip_interface, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name = ET.SubElement(interface, "interface-name")
    interface_name.text = kwargs.pop('interface_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_ip_interface_output_interface_if_name(self, **kwargs):
    """Build <get_ip_interface>/<output>/<interface>/<if-name> and return callback(tree).

    Required kwargs: interface_type, interface_name, if_name;
    'callback' overrides self._callback.
    """
    # Dead ET.Element("config") from the generator removed; `config` aliases root.
    get_ip_interface = ET.Element("get_ip_interface")
    config = get_ip_interface
    output = ET.SubElement(get_ip_interface, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    if_name = ET.SubElement(interface, "if-name")
    if_name.text = kwargs.pop('if_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_ip_interface_output_interface_ip_address_ipv4(self, **kwargs):
    """Build <get_ip_interface>/<output>/<interface>/<ip-address>/<ipv4> and return callback(tree).

    Required kwargs: interface_type, interface_name, ipv4;
    'callback' overrides self._callback.
    """
    # Dead ET.Element("config") from the generator removed; `config` aliases root.
    get_ip_interface = ET.Element("get_ip_interface")
    config = get_ip_interface
    output = ET.SubElement(get_ip_interface, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    ip_address = ET.SubElement(interface, "ip-address")
    ipv4 = ET.SubElement(ip_address, "ipv4")
    ipv4.text = kwargs.pop('ipv4')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_ip_interface_output_interface_ip_address_ipv4_type(self, **kwargs):
    """Build <get_ip_interface>/<output>/<interface>/<ip-address>/<ipv4-type> and return callback(tree).

    Required kwargs: interface_type, interface_name, ipv4, ipv4_type;
    'callback' overrides self._callback.
    """
    # Dead ET.Element("config") from the generator removed; `config` aliases root.
    get_ip_interface = ET.Element("get_ip_interface")
    config = get_ip_interface
    output = ET.SubElement(get_ip_interface, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    ip_address = ET.SubElement(interface, "ip-address")
    ipv4_key = ET.SubElement(ip_address, "ipv4")
    ipv4_key.text = kwargs.pop('ipv4')
    ipv4_type = ET.SubElement(ip_address, "ipv4-type")
    ipv4_type.text = kwargs.pop('ipv4_type')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_ip_interface_output_interface_ip_address_broadcast(self, **kwargs):
    """Build <get_ip_interface>/<output>/<interface>/<ip-address>/<broadcast> and return callback(tree).

    Required kwargs: interface_type, interface_name, ipv4, broadcast;
    'callback' overrides self._callback.
    """
    # Dead ET.Element("config") from the generator removed; `config` aliases root.
    get_ip_interface = ET.Element("get_ip_interface")
    config = get_ip_interface
    output = ET.SubElement(get_ip_interface, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    ip_address = ET.SubElement(interface, "ip-address")
    ipv4_key = ET.SubElement(ip_address, "ipv4")
    ipv4_key.text = kwargs.pop('ipv4')
    broadcast = ET.SubElement(ip_address, "broadcast")
    broadcast.text = kwargs.pop('broadcast')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_ip_interface_output_interface_ip_address_ip_mtu(self, **kwargs):
    """Build <get_ip_interface>/<output>/<interface>/<ip-address>/<ip-mtu> and return callback(tree).

    Required kwargs: interface_type, interface_name, ipv4, ip_mtu;
    'callback' overrides self._callback.
    """
    # Dead ET.Element("config") from the generator removed; `config` aliases root.
    get_ip_interface = ET.Element("get_ip_interface")
    config = get_ip_interface
    output = ET.SubElement(get_ip_interface, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    ip_address = ET.SubElement(interface, "ip-address")
    ipv4_key = ET.SubElement(ip_address, "ipv4")
    ipv4_key.text = kwargs.pop('ipv4')
    ip_mtu = ET.SubElement(ip_address, "ip-mtu")
    ip_mtu.text = kwargs.pop('ip_mtu')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_ip_interface_output_interface_if_state(self, **kwargs):
    """Build <get_ip_interface>/<output>/<interface>/<if-state> and return callback(tree).

    Required kwargs: interface_type, interface_name, if_state;
    'callback' overrides self._callback.
    """
    # Dead ET.Element("config") from the generator removed; `config` aliases root.
    get_ip_interface = ET.Element("get_ip_interface")
    config = get_ip_interface
    output = ET.SubElement(get_ip_interface, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    if_state = ET.SubElement(interface, "if-state")
    if_state.text = kwargs.pop('if_state')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_ip_interface_output_interface_line_protocol_state(self, **kwargs):
    """Build <get_ip_interface>/<output>/<interface>/<line-protocol-state> and return callback(tree).

    Required kwargs: interface_type, interface_name, line_protocol_state;
    'callback' overrides self._callback.
    """
    # Dead ET.Element("config") from the generator removed; `config` aliases root.
    get_ip_interface = ET.Element("get_ip_interface")
    config = get_ip_interface
    output = ET.SubElement(get_ip_interface, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    line_protocol_state = ET.SubElement(interface, "line-protocol-state")
    line_protocol_state.text = kwargs.pop('line_protocol_state')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_ip_interface_output_interface_proxy_arp(self, **kwargs):
    """Build <get_ip_interface>/<output>/<interface>/<proxy-arp> and return callback(tree).

    Required kwargs: interface_type, interface_name, proxy_arp;
    'callback' overrides self._callback.
    """
    # Dead ET.Element("config") from the generator removed; `config` aliases root.
    get_ip_interface = ET.Element("get_ip_interface")
    config = get_ip_interface
    output = ET.SubElement(get_ip_interface, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    proxy_arp = ET.SubElement(interface, "proxy-arp")
    proxy_arp.text = kwargs.pop('proxy_arp')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_ip_interface_output_interface_vrf(self, **kwargs):
    """Build <get_ip_interface>/<output>/<interface>/<vrf> and return callback(tree).

    Required kwargs: interface_type, interface_name, vrf;
    'callback' overrides self._callback.
    """
    # Dead ET.Element("config") from the generator removed; `config` aliases root.
    get_ip_interface = ET.Element("get_ip_interface")
    config = get_ip_interface
    output = ET.SubElement(get_ip_interface, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    vrf = ET.SubElement(interface, "vrf")
    vrf.text = kwargs.pop('vrf')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_ip_interface_output_has_more(self, **kwargs):
    """Build <get_ip_interface>/<output>/<has-more> and return callback(tree).

    Required kwargs: has_more; 'callback' overrides self._callback.
    """
    # Dead ET.Element("config") from the generator removed; `config` aliases root.
    get_ip_interface = ET.Element("get_ip_interface")
    config = get_ip_interface
    output = ET.SubElement(get_ip_interface, "output")
    has_more = ET.SubElement(output, "has-more")
    has_more.text = kwargs.pop('has_more')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_input_request_type_get_request_interface_type(self, **kwargs):
    """Build <get_interface_detail>/<input>/<request-type>/<get-request>/<interface-type> and return callback(tree).

    Required kwargs: interface_type; 'callback' overrides self._callback.
    """
    # Dead ET.Element("config") removed; local `input` renamed to avoid
    # shadowing the builtin. `config` aliases the root element.
    get_interface_detail = ET.Element("get_interface_detail")
    config = get_interface_detail
    input_elem = ET.SubElement(get_interface_detail, "input")
    request_type = ET.SubElement(input_elem, "request-type")
    get_request = ET.SubElement(request_type, "get-request")
    interface_type = ET.SubElement(get_request, "interface-type")
    interface_type.text = kwargs.pop('interface_type')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_input_request_type_get_request_interface_name(self, **kwargs):
    """Build <get_interface_detail>/<input>/<request-type>/<get-request>/<interface-name> and return callback(tree).

    Required kwargs: interface_name; 'callback' overrides self._callback.
    """
    # Dead ET.Element("config") removed; local `input` renamed to avoid
    # shadowing the builtin. `config` aliases the root element.
    get_interface_detail = ET.Element("get_interface_detail")
    config = get_interface_detail
    input_elem = ET.SubElement(get_interface_detail, "input")
    request_type = ET.SubElement(input_elem, "request-type")
    get_request = ET.SubElement(request_type, "get-request")
    interface_name = ET.SubElement(get_request, "interface-name")
    interface_name.text = kwargs.pop('interface_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_input_request_type_get_next_request_last_rcvd_interface_interface_type(self, **kwargs):
    """Build <get_interface_detail>/<input>/<request-type>/<get-next-request>/<last-rcvd-interface>/<interface-type> and return callback(tree).

    Required kwargs: interface_type; 'callback' overrides self._callback.
    """
    # Dead ET.Element("config") removed; local `input` renamed to avoid
    # shadowing the builtin. `config` aliases the root element.
    get_interface_detail = ET.Element("get_interface_detail")
    config = get_interface_detail
    input_elem = ET.SubElement(get_interface_detail, "input")
    request_type = ET.SubElement(input_elem, "request-type")
    get_next_request = ET.SubElement(request_type, "get-next-request")
    last_rcvd_interface = ET.SubElement(get_next_request, "last-rcvd-interface")
    interface_type = ET.SubElement(last_rcvd_interface, "interface-type")
    interface_type.text = kwargs.pop('interface_type')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_input_request_type_get_next_request_last_rcvd_interface_interface_name(self, **kwargs):
    """Build <get_interface_detail>/<input>/<request-type>/<get-next-request>/<last-rcvd-interface>/<interface-name> and return callback(tree).

    Required kwargs: interface_name; 'callback' overrides self._callback.
    """
    # Dead ET.Element("config") removed; local `input` renamed to avoid
    # shadowing the builtin. `config` aliases the root element.
    get_interface_detail = ET.Element("get_interface_detail")
    config = get_interface_detail
    input_elem = ET.SubElement(get_interface_detail, "input")
    request_type = ET.SubElement(input_elem, "request-type")
    get_next_request = ET.SubElement(request_type, "get-next-request")
    last_rcvd_interface = ET.SubElement(get_next_request, "last-rcvd-interface")
    interface_name = ET.SubElement(last_rcvd_interface, "interface-name")
    interface_name.text = kwargs.pop('interface_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_interface_type(self, **kwargs):
    """Build <get_interface_detail>/<output>/<interface>/<interface-type> and return callback(tree).

    Required kwargs: interface_name, interface_type; 'callback' overrides self._callback.
    """
    # Dead ET.Element("config") from the generator removed; `config` aliases root.
    get_interface_detail = ET.Element("get_interface_detail")
    config = get_interface_detail
    output = ET.SubElement(get_interface_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_type = ET.SubElement(interface, "interface-type")
    interface_type.text = kwargs.pop('interface_type')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_interface_name(self, **kwargs):
    """Build <get_interface_detail>/<output>/<interface>/<interface-name> and return callback(tree).

    Required kwargs: interface_type, interface_name; 'callback' overrides self._callback.
    """
    # Dead ET.Element("config") from the generator removed; `config` aliases root.
    get_interface_detail = ET.Element("get_interface_detail")
    config = get_interface_detail
    output = ET.SubElement(get_interface_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name = ET.SubElement(interface, "interface-name")
    interface_name.text = kwargs.pop('interface_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_ifindex(self, **kwargs):
    """Build <get_interface_detail>/<output>/<interface>/<ifindex> and return callback(tree).

    Required kwargs: interface_type, interface_name, ifindex;
    'callback' overrides self._callback.
    """
    # Dead ET.Element("config") from the generator removed; `config` aliases root.
    get_interface_detail = ET.Element("get_interface_detail")
    config = get_interface_detail
    output = ET.SubElement(get_interface_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    ifindex = ET.SubElement(interface, "ifindex")
    ifindex.text = kwargs.pop('ifindex')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_mtu(self, **kwargs):
    """Build <get_interface_detail>/<output>/<interface>/<mtu> and return callback(tree).

    Required kwargs: interface_type, interface_name, mtu;
    'callback' overrides self._callback.
    """
    # Dead ET.Element("config") from the generator removed; `config` aliases root.
    get_interface_detail = ET.Element("get_interface_detail")
    config = get_interface_detail
    output = ET.SubElement(get_interface_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    mtu = ET.SubElement(interface, "mtu")
    mtu.text = kwargs.pop('mtu')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_ip_mtu(self, **kwargs):
    """Build <get_interface_detail>/<output>/<interface>/<ip-mtu> and return callback(tree).

    Required kwargs: interface_type, interface_name, ip_mtu;
    'callback' overrides self._callback.
    """
    # Dead ET.Element("config") from the generator removed; `config` aliases root.
    get_interface_detail = ET.Element("get_interface_detail")
    config = get_interface_detail
    output = ET.SubElement(get_interface_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    ip_mtu = ET.SubElement(interface, "ip-mtu")
    ip_mtu.text = kwargs.pop('ip_mtu')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_if_name(self, **kwargs):
    """Build <get_interface_detail>/<output>/<interface>/<if-name> and return callback(tree).

    Required kwargs: interface_type, interface_name, if_name;
    'callback' overrides self._callback.
    """
    # Dead ET.Element("config") from the generator removed; `config` aliases root.
    get_interface_detail = ET.Element("get_interface_detail")
    config = get_interface_detail
    output = ET.SubElement(get_interface_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    if_name = ET.SubElement(interface, "if-name")
    if_name.text = kwargs.pop('if_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_if_state(self, **kwargs):
    """Build <get_interface_detail>/<output>/<interface>/<if-state> and return callback(tree).

    Required kwargs: interface_type, interface_name, if_state;
    'callback' overrides self._callback.
    """
    # Dead ET.Element("config") from the generator removed; `config` aliases root.
    get_interface_detail = ET.Element("get_interface_detail")
    config = get_interface_detail
    output = ET.SubElement(get_interface_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    if_state = ET.SubElement(interface, "if-state")
    if_state.text = kwargs.pop('if_state')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_line_protocol_state(self, **kwargs):
    """Build <get_interface_detail>/<output>/<interface>/<line-protocol-state> and return callback(tree).

    Required kwargs: interface_type, interface_name, line_protocol_state;
    'callback' overrides self._callback.
    """
    # Dead ET.Element("config") from the generator removed; `config` aliases root.
    get_interface_detail = ET.Element("get_interface_detail")
    config = get_interface_detail
    output = ET.SubElement(get_interface_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    line_protocol_state = ET.SubElement(interface, "line-protocol-state")
    line_protocol_state.text = kwargs.pop('line_protocol_state')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_line_protocol_state_info(self, **kwargs):
    """Build <get_interface_detail>/<output>/<interface>/<line-protocol-state-info> and return callback(tree).

    Required kwargs: interface_type, interface_name, line_protocol_state_info;
    'callback' overrides self._callback.
    """
    # Dead ET.Element("config") from the generator removed; `config` aliases root.
    get_interface_detail = ET.Element("get_interface_detail")
    config = get_interface_detail
    output = ET.SubElement(get_interface_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    line_protocol_state_info = ET.SubElement(interface, "line-protocol-state-info")
    line_protocol_state_info.text = kwargs.pop('line_protocol_state_info')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_line_protocol_exception_info(self, **kwargs):
    """Build <get_interface_detail>/<output>/<interface>/<line-protocol-exception-info> and return callback(tree).

    Required kwargs: interface_type, interface_name, line_protocol_exception_info;
    'callback' overrides self._callback.
    """
    # Dead ET.Element("config") from the generator removed; `config` aliases root.
    get_interface_detail = ET.Element("get_interface_detail")
    config = get_interface_detail
    output = ET.SubElement(get_interface_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    line_protocol_exception_info = ET.SubElement(interface, "line-protocol-exception-info")
    line_protocol_exception_info.text = kwargs.pop('line_protocol_exception_info')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_hardware_type(self, **kwargs):
    """Build <get_interface_detail>/<output>/<interface>/<hardware-type> and return callback(tree).

    Required kwargs: interface_type, interface_name, hardware_type;
    'callback' overrides self._callback.
    """
    # Dead ET.Element("config") from the generator removed; `config` aliases root.
    get_interface_detail = ET.Element("get_interface_detail")
    config = get_interface_detail
    output = ET.SubElement(get_interface_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    hardware_type = ET.SubElement(interface, "hardware-type")
    hardware_type.text = kwargs.pop('hardware_type')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_logical_hardware_address(self, **kwargs):
    """Build <get_interface_detail>/<output>/<interface>/<logical-hardware-address> and return callback(tree).

    Required kwargs: interface_type, interface_name, logical_hardware_address;
    'callback' overrides self._callback.
    """
    # Dead ET.Element("config") from the generator removed; `config` aliases root.
    get_interface_detail = ET.Element("get_interface_detail")
    config = get_interface_detail
    output = ET.SubElement(get_interface_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    logical_hardware_address = ET.SubElement(interface, "logical-hardware-address")
    logical_hardware_address.text = kwargs.pop('logical_hardware_address')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_current_hardware_address(self, **kwargs):
    """Build <get_interface_detail>/<output>/<interface>/<current-hardware-address> and return callback(tree).

    Required kwargs: interface_type, interface_name, current_hardware_address;
    'callback' overrides self._callback.
    """
    # Dead ET.Element("config") from the generator removed; `config` aliases root.
    get_interface_detail = ET.Element("get_interface_detail")
    config = get_interface_detail
    output = ET.SubElement(get_interface_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    current_hardware_address = ET.SubElement(interface, "current-hardware-address")
    current_hardware_address.text = kwargs.pop('current_hardware_address')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_media_type(self, **kwargs):
    """Build <get_interface_detail>/<output>/<interface>/<media-type> and return callback(tree).

    Required kwargs: interface_type, interface_name, media_type;
    'callback' overrides self._callback.
    """
    # Dead ET.Element("config") from the generator removed; `config` aliases root.
    get_interface_detail = ET.Element("get_interface_detail")
    config = get_interface_detail
    output = ET.SubElement(get_interface_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    media_type = ET.SubElement(interface, "media-type")
    media_type.text = kwargs.pop('media_type')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_wavelength(self, **kwargs):
    """Build <get_interface_detail>/<output>/<interface>/<wavelength> and return callback(tree).

    Required kwargs: interface_type, interface_name, wavelength;
    'callback' overrides self._callback.
    """
    # Dead ET.Element("config") from the generator removed; `config` aliases root.
    get_interface_detail = ET.Element("get_interface_detail")
    config = get_interface_detail
    output = ET.SubElement(get_interface_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    wavelength = ET.SubElement(interface, "wavelength")
    wavelength.text = kwargs.pop('wavelength')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_if_description(self, **kwargs):
    """Build <get_interface_detail>/<output>/<interface>/<if-description> and return callback(tree).

    Required kwargs: interface_type, interface_name, if_description;
    'callback' overrides self._callback.
    """
    # Dead ET.Element("config") from the generator removed; `config` aliases root.
    get_interface_detail = ET.Element("get_interface_detail")
    config = get_interface_detail
    output = ET.SubElement(get_interface_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    if_description = ET.SubElement(interface, "if-description")
    if_description.text = kwargs.pop('if_description')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_actual_line_speed(self, **kwargs):
    """Build <get_interface_detail>/<output>/<interface>/<actual-line-speed> and return callback(tree).

    Required kwargs: interface_type, interface_name, actual_line_speed;
    'callback' overrides self._callback.
    """
    # Dead ET.Element("config") from the generator removed; `config` aliases root.
    get_interface_detail = ET.Element("get_interface_detail")
    config = get_interface_detail
    output = ET.SubElement(get_interface_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    actual_line_speed = ET.SubElement(interface, "actual-line-speed")
    actual_line_speed.text = kwargs.pop('actual_line_speed')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_configured_line_speed(self, **kwargs):
    """Auto Generated Code

    Build a ``get_interface_detail`` element whose ``output/interface``
    node (keyed by interface-type and interface-name) carries the
    ``configured-line-speed`` leaf, then return ``callback(element)``.

    Kwargs: interface_type, interface_name, configured_line_speed
    (required str values); callback (optional, defaults to
    ``self._callback``). Raises ``KeyError`` when a required kwarg is missing.
    """
    get_interface_detail = ET.Element("get_interface_detail")
    output = ET.SubElement(get_interface_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    configured_line_speed = ET.SubElement(interface, "configured-line-speed")
    configured_line_speed.text = kwargs.pop('configured_line_speed')
    callback = kwargs.pop('callback', self._callback)
    return callback(get_interface_detail)
def get_interface_detail_output_interface_line_duplex_state(self, **kwargs):
    """Auto Generated Code

    Build a ``get_interface_detail`` element whose ``output/interface``
    node (keyed by interface-type and interface-name) carries the
    ``line-duplex-state`` leaf, then return ``callback(element)``.

    Kwargs: interface_type, interface_name, line_duplex_state (required
    str values); callback (optional, defaults to ``self._callback``).
    Raises ``KeyError`` when a required kwarg is missing.
    """
    get_interface_detail = ET.Element("get_interface_detail")
    output = ET.SubElement(get_interface_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    line_duplex_state = ET.SubElement(interface, "line-duplex-state")
    line_duplex_state.text = kwargs.pop('line_duplex_state')
    callback = kwargs.pop('callback', self._callback)
    return callback(get_interface_detail)
def get_interface_detail_output_interface_flow_control(self, **kwargs):
    """Auto Generated Code

    Build a ``get_interface_detail`` element whose ``output/interface``
    node (keyed by interface-type and interface-name) carries the
    ``flow-control`` leaf, then return ``callback(element)``.

    Kwargs: interface_type, interface_name, flow_control (required str
    values); callback (optional, defaults to ``self._callback``).
    Raises ``KeyError`` when a required kwarg is missing.
    """
    get_interface_detail = ET.Element("get_interface_detail")
    output = ET.SubElement(get_interface_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    flow_control = ET.SubElement(interface, "flow-control")
    flow_control.text = kwargs.pop('flow_control')
    callback = kwargs.pop('callback', self._callback)
    return callback(get_interface_detail)
def get_interface_detail_output_interface_queuing_strategy(self, **kwargs):
    """Auto Generated Code

    Build a ``get_interface_detail`` element whose ``output/interface``
    node (keyed by interface-type and interface-name) carries the
    ``queuing-strategy`` leaf, then return ``callback(element)``.

    Kwargs: interface_type, interface_name, queuing_strategy (required
    str values); callback (optional, defaults to ``self._callback``).
    Raises ``KeyError`` when a required kwarg is missing.
    """
    get_interface_detail = ET.Element("get_interface_detail")
    output = ET.SubElement(get_interface_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    queuing_strategy = ET.SubElement(interface, "queuing-strategy")
    queuing_strategy.text = kwargs.pop('queuing_strategy')
    callback = kwargs.pop('callback', self._callback)
    return callback(get_interface_detail)
def get_interface_detail_output_interface_port_role(self, **kwargs):
    """Auto Generated Code

    Build a ``get_interface_detail`` element whose ``output/interface``
    node (keyed by interface-type and interface-name) carries the
    ``port-role`` leaf, then return ``callback(element)``.

    Kwargs: interface_type, interface_name, port_role (required str
    values); callback (optional, defaults to ``self._callback``).
    Raises ``KeyError`` when a required kwarg is missing.
    """
    get_interface_detail = ET.Element("get_interface_detail")
    output = ET.SubElement(get_interface_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    port_role = ET.SubElement(interface, "port-role")
    port_role.text = kwargs.pop('port_role')
    callback = kwargs.pop('callback', self._callback)
    return callback(get_interface_detail)
def get_interface_detail_output_interface_port_mode(self, **kwargs):
    """Auto Generated Code

    Build a ``get_interface_detail`` element whose ``output/interface``
    node (keyed by interface-type and interface-name) carries the
    ``port-mode`` leaf, then return ``callback(element)``.

    Kwargs: interface_type, interface_name, port_mode (required str
    values); callback (optional, defaults to ``self._callback``).
    Raises ``KeyError`` when a required kwarg is missing.
    """
    get_interface_detail = ET.Element("get_interface_detail")
    output = ET.SubElement(get_interface_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    port_mode = ET.SubElement(interface, "port-mode")
    port_mode.text = kwargs.pop('port_mode')
    callback = kwargs.pop('callback', self._callback)
    return callback(get_interface_detail)
def get_interface_detail_output_interface_ifHCInOctets(self, **kwargs):
    """Auto Generated Code

    Build a ``get_interface_detail`` element whose ``output/interface``
    node (keyed by interface-type and interface-name) carries the
    ``ifHCInOctets`` counter leaf, then return ``callback(element)``.

    Kwargs: interface_type, interface_name, ifHCInOctets (required str
    values); callback (optional, defaults to ``self._callback``).
    Raises ``KeyError`` when a required kwarg is missing.
    """
    get_interface_detail = ET.Element("get_interface_detail")
    output = ET.SubElement(get_interface_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    ifHCInOctets = ET.SubElement(interface, "ifHCInOctets")
    ifHCInOctets.text = kwargs.pop('ifHCInOctets')
    callback = kwargs.pop('callback', self._callback)
    return callback(get_interface_detail)
def get_interface_detail_output_interface_ifHCInUcastPkts(self, **kwargs):
    """Auto Generated Code

    Build a ``get_interface_detail`` element whose ``output/interface``
    node (keyed by interface-type and interface-name) carries the
    ``ifHCInUcastPkts`` counter leaf, then return ``callback(element)``.

    Kwargs: interface_type, interface_name, ifHCInUcastPkts (required
    str values); callback (optional, defaults to ``self._callback``).
    Raises ``KeyError`` when a required kwarg is missing.
    """
    get_interface_detail = ET.Element("get_interface_detail")
    output = ET.SubElement(get_interface_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    ifHCInUcastPkts = ET.SubElement(interface, "ifHCInUcastPkts")
    ifHCInUcastPkts.text = kwargs.pop('ifHCInUcastPkts')
    callback = kwargs.pop('callback', self._callback)
    return callback(get_interface_detail)
def get_interface_detail_output_interface_ifHCInMulticastPkts(self, **kwargs):
    """Auto Generated Code

    Build a ``get_interface_detail`` element whose ``output/interface``
    node (keyed by interface-type and interface-name) carries the
    ``ifHCInMulticastPkts`` counter leaf, then return ``callback(element)``.

    Kwargs: interface_type, interface_name, ifHCInMulticastPkts
    (required str values); callback (optional, defaults to
    ``self._callback``). Raises ``KeyError`` when a required kwarg is missing.
    """
    get_interface_detail = ET.Element("get_interface_detail")
    output = ET.SubElement(get_interface_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    ifHCInMulticastPkts = ET.SubElement(interface, "ifHCInMulticastPkts")
    ifHCInMulticastPkts.text = kwargs.pop('ifHCInMulticastPkts')
    callback = kwargs.pop('callback', self._callback)
    return callback(get_interface_detail)
def get_interface_detail_output_interface_ifHCInBroadcastPkts(self, **kwargs):
    """Auto Generated Code

    Build a ``get_interface_detail`` element whose ``output/interface``
    node (keyed by interface-type and interface-name) carries the
    ``ifHCInBroadcastPkts`` counter leaf, then return ``callback(element)``.

    Kwargs: interface_type, interface_name, ifHCInBroadcastPkts
    (required str values); callback (optional, defaults to
    ``self._callback``). Raises ``KeyError`` when a required kwarg is missing.
    """
    get_interface_detail = ET.Element("get_interface_detail")
    output = ET.SubElement(get_interface_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    ifHCInBroadcastPkts = ET.SubElement(interface, "ifHCInBroadcastPkts")
    ifHCInBroadcastPkts.text = kwargs.pop('ifHCInBroadcastPkts')
    callback = kwargs.pop('callback', self._callback)
    return callback(get_interface_detail)
def get_interface_detail_output_interface_ifHCInErrors(self, **kwargs):
    """Auto Generated Code

    Build a ``get_interface_detail`` element whose ``output/interface``
    node (keyed by interface-type and interface-name) carries the
    ``ifHCInErrors`` counter leaf, then return ``callback(element)``.

    Kwargs: interface_type, interface_name, ifHCInErrors (required str
    values); callback (optional, defaults to ``self._callback``).
    Raises ``KeyError`` when a required kwarg is missing.
    """
    get_interface_detail = ET.Element("get_interface_detail")
    output = ET.SubElement(get_interface_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    ifHCInErrors = ET.SubElement(interface, "ifHCInErrors")
    ifHCInErrors.text = kwargs.pop('ifHCInErrors')
    callback = kwargs.pop('callback', self._callback)
    return callback(get_interface_detail)
def get_interface_detail_output_interface_ifHCOutOctets(self, **kwargs):
    """Auto Generated Code

    Build a ``get_interface_detail`` element whose ``output/interface``
    node (keyed by interface-type and interface-name) carries the
    ``ifHCOutOctets`` counter leaf, then return ``callback(element)``.

    Kwargs: interface_type, interface_name, ifHCOutOctets (required str
    values); callback (optional, defaults to ``self._callback``).
    Raises ``KeyError`` when a required kwarg is missing.
    """
    get_interface_detail = ET.Element("get_interface_detail")
    output = ET.SubElement(get_interface_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    ifHCOutOctets = ET.SubElement(interface, "ifHCOutOctets")
    ifHCOutOctets.text = kwargs.pop('ifHCOutOctets')
    callback = kwargs.pop('callback', self._callback)
    return callback(get_interface_detail)
def get_interface_detail_output_interface_ifHCOutUcastPkts(self, **kwargs):
    """Auto Generated Code

    Build a ``get_interface_detail`` element whose ``output/interface``
    node (keyed by interface-type and interface-name) carries the
    ``ifHCOutUcastPkts`` counter leaf, then return ``callback(element)``.

    Kwargs: interface_type, interface_name, ifHCOutUcastPkts (required
    str values); callback (optional, defaults to ``self._callback``).
    Raises ``KeyError`` when a required kwarg is missing.
    """
    get_interface_detail = ET.Element("get_interface_detail")
    output = ET.SubElement(get_interface_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    ifHCOutUcastPkts = ET.SubElement(interface, "ifHCOutUcastPkts")
    ifHCOutUcastPkts.text = kwargs.pop('ifHCOutUcastPkts')
    callback = kwargs.pop('callback', self._callback)
    return callback(get_interface_detail)
def get_interface_detail_output_interface_ifHCOutMulticastPkts(self, **kwargs):
    """Auto Generated Code

    Build a ``get_interface_detail`` element whose ``output/interface``
    node (keyed by interface-type and interface-name) carries the
    ``ifHCOutMulticastPkts`` counter leaf, then return ``callback(element)``.

    Kwargs: interface_type, interface_name, ifHCOutMulticastPkts
    (required str values); callback (optional, defaults to
    ``self._callback``). Raises ``KeyError`` when a required kwarg is missing.
    """
    get_interface_detail = ET.Element("get_interface_detail")
    output = ET.SubElement(get_interface_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    ifHCOutMulticastPkts = ET.SubElement(interface, "ifHCOutMulticastPkts")
    ifHCOutMulticastPkts.text = kwargs.pop('ifHCOutMulticastPkts')
    callback = kwargs.pop('callback', self._callback)
    return callback(get_interface_detail)
def get_interface_detail_output_interface_ifHCOutBroadcastPkts(self, **kwargs):
    """Auto Generated Code

    Build a ``get_interface_detail`` element whose ``output/interface``
    node (keyed by interface-type and interface-name) carries the
    ``ifHCOutBroadcastPkts`` counter leaf, then return ``callback(element)``.

    Kwargs: interface_type, interface_name, ifHCOutBroadcastPkts
    (required str values); callback (optional, defaults to
    ``self._callback``). Raises ``KeyError`` when a required kwarg is missing.
    """
    get_interface_detail = ET.Element("get_interface_detail")
    output = ET.SubElement(get_interface_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    ifHCOutBroadcastPkts = ET.SubElement(interface, "ifHCOutBroadcastPkts")
    ifHCOutBroadcastPkts.text = kwargs.pop('ifHCOutBroadcastPkts')
    callback = kwargs.pop('callback', self._callback)
    return callback(get_interface_detail)
def get_interface_detail_output_interface_ifHCOutErrors(self, **kwargs):
    """Auto Generated Code

    Build a ``get_interface_detail`` element whose ``output/interface``
    node (keyed by interface-type and interface-name) carries the
    ``ifHCOutErrors`` counter leaf, then return ``callback(element)``.

    Kwargs: interface_type, interface_name, ifHCOutErrors (required str
    values); callback (optional, defaults to ``self._callback``).
    Raises ``KeyError`` when a required kwarg is missing.
    """
    get_interface_detail = ET.Element("get_interface_detail")
    output = ET.SubElement(get_interface_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    ifHCOutErrors = ET.SubElement(interface, "ifHCOutErrors")
    ifHCOutErrors.text = kwargs.pop('ifHCOutErrors')
    callback = kwargs.pop('callback', self._callback)
    return callback(get_interface_detail)
def get_interface_detail_output_has_more(self, **kwargs):
    """Auto Generated Code

    Build a ``get_interface_detail`` element whose ``output`` node
    carries the ``has-more`` pagination leaf, then return
    ``callback(element)``.

    Kwargs: has_more (required str value); callback (optional,
    defaults to ``self._callback``).
    Raises ``KeyError`` when ``has_more`` is missing.
    """
    get_interface_detail = ET.Element("get_interface_detail")
    output = ET.SubElement(get_interface_detail, "output")
    has_more = ET.SubElement(output, "has-more")
    has_more.text = kwargs.pop('has_more')
    callback = kwargs.pop('callback', self._callback)
    return callback(get_interface_detail)
def get_media_detail_input_interface_type(self, **kwargs):
    """Auto Generated Code

    Build a ``get_media_detail`` element whose ``input`` node carries
    the ``interface-type`` leaf, then return ``callback(element)``.

    Kwargs: interface_type (required str value); callback (optional,
    defaults to ``self._callback``).
    Raises ``KeyError`` when ``interface_type`` is missing.
    """
    get_media_detail = ET.Element("get_media_detail")
    input = ET.SubElement(get_media_detail, "input")
    interface_type = ET.SubElement(input, "interface-type")
    interface_type.text = kwargs.pop('interface_type')
    callback = kwargs.pop('callback', self._callback)
    return callback(get_media_detail)
def get_media_detail_input_interface_name(self, **kwargs):
    """Auto Generated Code

    Build a ``get_media_detail`` element whose ``input`` node carries
    the ``interface-name`` leaf, then return ``callback(element)``.

    Kwargs: interface_name (required str value); callback (optional,
    defaults to ``self._callback``).
    Raises ``KeyError`` when ``interface_name`` is missing.
    """
    get_media_detail = ET.Element("get_media_detail")
    input = ET.SubElement(get_media_detail, "input")
    interface_name = ET.SubElement(input, "interface-name")
    interface_name.text = kwargs.pop('interface_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(get_media_detail)
def get_media_detail_input_rbridge_id(self, **kwargs):
    """Auto Generated Code

    Build a ``get_media_detail`` element whose ``input`` node carries
    the ``rbridge-id`` leaf, then return ``callback(element)``.

    Kwargs: rbridge_id (required str value); callback (optional,
    defaults to ``self._callback``).
    Raises ``KeyError`` when ``rbridge_id`` is missing.
    """
    get_media_detail = ET.Element("get_media_detail")
    input = ET.SubElement(get_media_detail, "input")
    rbridge_id = ET.SubElement(input, "rbridge-id")
    rbridge_id.text = kwargs.pop('rbridge_id')
    callback = kwargs.pop('callback', self._callback)
    return callback(get_media_detail)
def get_media_detail_output_interface_interface_type(self, **kwargs):
    """Auto Generated Code

    Build a ``get_media_detail`` element whose ``output/interface``
    node (keyed by interface-name) carries the ``interface-type``
    leaf, then return ``callback(element)``.

    Kwargs: interface_name, interface_type (required str values);
    callback (optional, defaults to ``self._callback``).
    Raises ``KeyError`` when a required kwarg is missing.
    """
    get_media_detail = ET.Element("get_media_detail")
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_type = ET.SubElement(interface, "interface-type")
    interface_type.text = kwargs.pop('interface_type')
    callback = kwargs.pop('callback', self._callback)
    return callback(get_media_detail)
def get_media_detail_output_interface_interface_name(self, **kwargs):
    """Auto Generated Code

    Build a ``get_media_detail`` element whose ``output/interface``
    node (keyed by interface-type) carries the ``interface-name``
    leaf, then return ``callback(element)``.

    Kwargs: interface_type, interface_name (required str values);
    callback (optional, defaults to ``self._callback``).
    Raises ``KeyError`` when a required kwarg is missing.
    """
    get_media_detail = ET.Element("get_media_detail")
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name = ET.SubElement(interface, "interface-name")
    interface_name.text = kwargs.pop('interface_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(get_media_detail)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_speed(self, **kwargs):
    """Auto Generated Code

    Build a ``get_media_detail`` element whose
    ``output/interface/interface-identifier/sfp/sfp`` node carries the
    ``speed`` leaf, then return ``callback(element)``.

    Kwargs: interface_type, interface_name, speed (required str
    values); callback (optional, defaults to ``self._callback``).
    Raises ``KeyError`` when a required kwarg is missing.
    """
    get_media_detail = ET.Element("get_media_detail")
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    # The YANG model nests an "sfp" container inside the "sfp" choice node.
    sfp_choice = ET.SubElement(interface_identifier, "sfp")
    sfp = ET.SubElement(sfp_choice, "sfp")
    speed = ET.SubElement(sfp, "speed")
    speed.text = kwargs.pop('speed')
    callback = kwargs.pop('callback', self._callback)
    return callback(get_media_detail)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_connector(self, **kwargs):
    """Auto Generated Code

    Build a ``get_media_detail`` element whose
    ``output/interface/interface-identifier/sfp/sfp`` node carries the
    ``connector`` leaf, then return ``callback(element)``.

    Kwargs: interface_type, interface_name, connector (required str
    values); callback (optional, defaults to ``self._callback``).
    Raises ``KeyError`` when a required kwarg is missing.
    """
    get_media_detail = ET.Element("get_media_detail")
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    # The YANG model nests an "sfp" container inside the "sfp" choice node.
    sfp_choice = ET.SubElement(interface_identifier, "sfp")
    sfp = ET.SubElement(sfp_choice, "sfp")
    connector = ET.SubElement(sfp, "connector")
    connector.text = kwargs.pop('connector')
    callback = kwargs.pop('callback', self._callback)
    return callback(get_media_detail)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_encoding(self, **kwargs):
    """Auto Generated Code

    Build a ``get_media_detail`` element whose
    ``output/interface/interface-identifier/sfp/sfp`` node carries the
    ``encoding`` leaf, then return ``callback(element)``.

    Kwargs: interface_type, interface_name, encoding (required str
    values); callback (optional, defaults to ``self._callback``).
    Raises ``KeyError`` when a required kwarg is missing.
    """
    get_media_detail = ET.Element("get_media_detail")
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    # The YANG model nests an "sfp" container inside the "sfp" choice node.
    sfp_choice = ET.SubElement(interface_identifier, "sfp")
    sfp = ET.SubElement(sfp_choice, "sfp")
    encoding = ET.SubElement(sfp, "encoding")
    encoding.text = kwargs.pop('encoding')
    callback = kwargs.pop('callback', self._callback)
    return callback(get_media_detail)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_vendor_name(self, **kwargs):
    """Auto Generated Code

    Build a ``get_media_detail`` element whose
    ``output/interface/interface-identifier/sfp/sfp`` node carries the
    ``vendor-name`` leaf, then return ``callback(element)``.

    Kwargs: interface_type, interface_name, vendor_name (required str
    values); callback (optional, defaults to ``self._callback``).
    Raises ``KeyError`` when a required kwarg is missing.
    """
    get_media_detail = ET.Element("get_media_detail")
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    # The YANG model nests an "sfp" container inside the "sfp" choice node.
    sfp_choice = ET.SubElement(interface_identifier, "sfp")
    sfp = ET.SubElement(sfp_choice, "sfp")
    vendor_name = ET.SubElement(sfp, "vendor-name")
    vendor_name.text = kwargs.pop('vendor_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(get_media_detail)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_vendor_oui(self, **kwargs):
    """Auto Generated Code

    Build a ``get_media_detail`` element whose
    ``output/interface/interface-identifier/sfp/sfp`` node carries the
    ``vendor-oui`` leaf, then return ``callback(element)``.

    Kwargs: interface_type, interface_name, vendor_oui (required str
    values); callback (optional, defaults to ``self._callback``).
    Raises ``KeyError`` when a required kwarg is missing.
    """
    get_media_detail = ET.Element("get_media_detail")
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    # The YANG model nests an "sfp" container inside the "sfp" choice node.
    sfp_choice = ET.SubElement(interface_identifier, "sfp")
    sfp = ET.SubElement(sfp_choice, "sfp")
    vendor_oui = ET.SubElement(sfp, "vendor-oui")
    vendor_oui.text = kwargs.pop('vendor_oui')
    callback = kwargs.pop('callback', self._callback)
    return callback(get_media_detail)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_vendor_pn(self, **kwargs):
    """Auto Generated Code

    Build a ``get_media_detail`` element whose
    ``output/interface/interface-identifier/sfp/sfp`` node carries the
    ``vendor-pn`` leaf, then return ``callback(element)``.

    Kwargs: interface_type, interface_name, vendor_pn (required str
    values); callback (optional, defaults to ``self._callback``).
    Raises ``KeyError`` when a required kwarg is missing.
    """
    get_media_detail = ET.Element("get_media_detail")
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    # The YANG model nests an "sfp" container inside the "sfp" choice node.
    sfp_choice = ET.SubElement(interface_identifier, "sfp")
    sfp = ET.SubElement(sfp_choice, "sfp")
    vendor_pn = ET.SubElement(sfp, "vendor-pn")
    vendor_pn.text = kwargs.pop('vendor_pn')
    callback = kwargs.pop('callback', self._callback)
    return callback(get_media_detail)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_vendor_rev(self, **kwargs):
    """Auto Generated Code

    Build a ``get_media_detail`` element whose
    ``output/interface/interface-identifier/sfp/sfp`` node carries the
    ``vendor-rev`` leaf, then return ``callback(element)``.

    Kwargs: interface_type, interface_name, vendor_rev (required str
    values); callback (optional, defaults to ``self._callback``).
    Raises ``KeyError`` when a required kwarg is missing.
    """
    get_media_detail = ET.Element("get_media_detail")
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    # The YANG model nests an "sfp" container inside the "sfp" choice node.
    sfp_choice = ET.SubElement(interface_identifier, "sfp")
    sfp = ET.SubElement(sfp_choice, "sfp")
    vendor_rev = ET.SubElement(sfp, "vendor-rev")
    vendor_rev.text = kwargs.pop('vendor_rev')
    callback = kwargs.pop('callback', self._callback)
    return callback(get_media_detail)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_distance(self, **kwargs):
    """Auto Generated Code

    Build a ``get_media_detail`` element whose
    ``output/interface/interface-identifier/sfp/sfp`` node carries the
    ``distance`` leaf, then return ``callback(element)``.

    Kwargs: interface_type, interface_name, distance (required str
    values); callback (optional, defaults to ``self._callback``).
    Raises ``KeyError`` when a required kwarg is missing.
    """
    get_media_detail = ET.Element("get_media_detail")
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    # The YANG model nests an "sfp" container inside the "sfp" choice node.
    sfp_choice = ET.SubElement(interface_identifier, "sfp")
    sfp = ET.SubElement(sfp_choice, "sfp")
    distance = ET.SubElement(sfp, "distance")
    distance.text = kwargs.pop('distance')
    callback = kwargs.pop('callback', self._callback)
    return callback(get_media_detail)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_media_form_factor(self, **kwargs):
    """Auto Generated Code

    Build a ``get_media_detail`` element whose
    ``output/interface/interface-identifier/sfp/sfp`` node carries the
    ``media-form-factor`` leaf, then return ``callback(element)``.

    Kwargs: interface_type, interface_name, media_form_factor
    (required str values); callback (optional, defaults to
    ``self._callback``). Raises ``KeyError`` when a required kwarg is missing.
    """
    get_media_detail = ET.Element("get_media_detail")
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    # The YANG model nests an "sfp" container inside the "sfp" choice node.
    sfp_choice = ET.SubElement(interface_identifier, "sfp")
    sfp = ET.SubElement(sfp_choice, "sfp")
    media_form_factor = ET.SubElement(sfp, "media-form-factor")
    media_form_factor.text = kwargs.pop('media_form_factor')
    callback = kwargs.pop('callback', self._callback)
    return callback(get_media_detail)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_wavelength(self, **kwargs):
    """Auto Generated Code

    Build a ``get_media_detail`` element whose
    ``output/interface/interface-identifier/sfp/sfp`` node carries the
    ``wavelength`` leaf, then return ``callback(element)``.

    Kwargs: interface_type, interface_name, wavelength (required str
    values); callback (optional, defaults to ``self._callback``).
    Raises ``KeyError`` when a required kwarg is missing.
    """
    get_media_detail = ET.Element("get_media_detail")
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    # The YANG model nests an "sfp" container inside the "sfp" choice node.
    sfp_choice = ET.SubElement(interface_identifier, "sfp")
    sfp = ET.SubElement(sfp_choice, "sfp")
    wavelength = ET.SubElement(sfp, "wavelength")
    wavelength.text = kwargs.pop('wavelength')
    callback = kwargs.pop('callback', self._callback)
    return callback(get_media_detail)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_serial_no(self, **kwargs):
    """Auto Generated Code

    Build a ``get_media_detail`` element whose
    ``output/interface/interface-identifier/sfp/sfp`` node carries the
    ``serial-no`` leaf, then return ``callback(element)``.

    Kwargs: interface_type, interface_name, serial_no (required str
    values); callback (optional, defaults to ``self._callback``).
    Raises ``KeyError`` when a required kwarg is missing.
    """
    get_media_detail = ET.Element("get_media_detail")
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    # The YANG model nests an "sfp" container inside the "sfp" choice node.
    sfp_choice = ET.SubElement(interface_identifier, "sfp")
    sfp = ET.SubElement(sfp_choice, "sfp")
    serial_no = ET.SubElement(sfp, "serial-no")
    serial_no.text = kwargs.pop('serial_no')
    callback = kwargs.pop('callback', self._callback)
    return callback(get_media_detail)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_date_code(self, **kwargs):
    """Auto Generated Code

    Build a ``get_media_detail`` element whose
    ``output/interface/interface-identifier/sfp/sfp`` node carries the
    ``date-code`` leaf, then return ``callback(element)``.

    Kwargs: interface_type, interface_name, date_code (required str
    values); callback (optional, defaults to ``self._callback``).
    Raises ``KeyError`` when a required kwarg is missing.
    """
    get_media_detail = ET.Element("get_media_detail")
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    # The YANG model nests an "sfp" container inside the "sfp" choice node.
    sfp_choice = ET.SubElement(interface_identifier, "sfp")
    sfp = ET.SubElement(sfp_choice, "sfp")
    date_code = ET.SubElement(sfp, "date-code")
    date_code.text = kwargs.pop('date_code')
    callback = kwargs.pop('callback', self._callback)
    return callback(get_media_detail)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_temperature(self, **kwargs):
    """Auto Generated Code

    Build a ``get_media_detail`` element whose
    ``output/interface/interface-identifier/sfp/sfp`` node carries the
    ``temperature`` leaf, then return ``callback(element)``.

    Kwargs: interface_type, interface_name, temperature (required str
    values); callback (optional, defaults to ``self._callback``).
    Raises ``KeyError`` when a required kwarg is missing.
    """
    get_media_detail = ET.Element("get_media_detail")
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    # The YANG model nests an "sfp" container inside the "sfp" choice node.
    sfp_choice = ET.SubElement(interface_identifier, "sfp")
    sfp = ET.SubElement(sfp_choice, "sfp")
    temperature = ET.SubElement(sfp, "temperature")
    temperature.text = kwargs.pop('temperature')
    callback = kwargs.pop('callback', self._callback)
    return callback(get_media_detail)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_voltage(self, **kwargs):
    """Auto Generated Code

    Build a ``get_media_detail`` element whose
    ``output/interface/interface-identifier/sfp/sfp`` node carries the
    ``voltage`` leaf, then return ``callback(element)``.

    Kwargs: interface_type, interface_name, voltage (required str
    values); callback (optional, defaults to ``self._callback``).
    Raises ``KeyError`` when a required kwarg is missing.
    """
    get_media_detail = ET.Element("get_media_detail")
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    # The YANG model nests an "sfp" container inside the "sfp" choice node.
    sfp_choice = ET.SubElement(interface_identifier, "sfp")
    sfp = ET.SubElement(sfp_choice, "sfp")
    voltage = ET.SubElement(sfp, "voltage")
    voltage.text = kwargs.pop('voltage')
    callback = kwargs.pop('callback', self._callback)
    return callback(get_media_detail)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_current(self, **kwargs):
    """Auto Generated Code

    Build a ``get_media_detail`` element whose
    ``output/interface/interface-identifier/sfp/sfp`` node carries the
    ``current`` leaf, then return ``callback(element)``.

    Kwargs: interface_type, interface_name, current (required str
    values); callback (optional, defaults to ``self._callback``).
    Raises ``KeyError`` when a required kwarg is missing.
    """
    get_media_detail = ET.Element("get_media_detail")
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    # The YANG model nests an "sfp" container inside the "sfp" choice node.
    sfp_choice = ET.SubElement(interface_identifier, "sfp")
    sfp = ET.SubElement(sfp_choice, "sfp")
    current = ET.SubElement(sfp, "current")
    current.text = kwargs.pop('current')
    callback = kwargs.pop('callback', self._callback)
    return callback(get_media_detail)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_tx_power(self, **kwargs):
    """Auto Generated Code

    Build a ``get_media_detail`` element whose
    ``output/interface/interface-identifier/sfp/sfp`` node carries the
    ``tx-power`` leaf, then return ``callback(element)``.

    Kwargs: interface_type, interface_name, tx_power (required str
    values); callback (optional, defaults to ``self._callback``).
    Raises ``KeyError`` when a required kwarg is missing.
    """
    get_media_detail = ET.Element("get_media_detail")
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    # The YANG model nests an "sfp" container inside the "sfp" choice node.
    sfp_choice = ET.SubElement(interface_identifier, "sfp")
    sfp = ET.SubElement(sfp_choice, "sfp")
    tx_power = ET.SubElement(sfp, "tx-power")
    tx_power.text = kwargs.pop('tx_power')
    callback = kwargs.pop('callback', self._callback)
    return callback(get_media_detail)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_rx_power(self, **kwargs):
    """Auto Generated Code

    Build a ``get_media_detail`` element whose
    ``output/interface/interface-identifier/sfp/sfp`` node carries the
    ``rx-power`` leaf, then return ``callback(element)``.

    Kwargs: interface_type, interface_name, rx_power (required str
    values); callback (optional, defaults to ``self._callback``).
    Raises ``KeyError`` when a required kwarg is missing.
    """
    get_media_detail = ET.Element("get_media_detail")
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    # The YANG model nests an "sfp" container inside the "sfp" choice node.
    sfp_choice = ET.SubElement(interface_identifier, "sfp")
    sfp = ET.SubElement(sfp_choice, "sfp")
    rx_power = ET.SubElement(sfp, "rx-power")
    rx_power.text = kwargs.pop('rx_power')
    callback = kwargs.pop('callback', self._callback)
    return callback(get_media_detail)
def get_media_detail_output_interface_interface_identifier_on_board_on_board_speed(self, **kwargs):
    """Auto Generated Code

    Build a ``get_media_detail`` element whose
    ``output/interface/interface-identifier/on-board/on-board`` node
    carries the ``speed`` leaf, then return ``callback(element)``.

    Kwargs: interface_type, interface_name, speed (required str
    values); callback (optional, defaults to ``self._callback``).
    Raises ``KeyError`` when a required kwarg is missing.
    """
    get_media_detail = ET.Element("get_media_detail")
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    # The YANG model nests an "on-board" container inside the "on-board" choice node.
    on_board_choice = ET.SubElement(interface_identifier, "on-board")
    on_board = ET.SubElement(on_board_choice, "on-board")
    speed = ET.SubElement(on_board, "speed")
    speed.text = kwargs.pop('speed')
    callback = kwargs.pop('callback', self._callback)
    return callback(get_media_detail)
def get_media_detail_output_interface_interface_identifier_on_board_on_board_connector(self, **kwargs):
    """Auto Generated Code

    Build a ``get_media_detail`` element whose
    ``output/interface/interface-identifier/on-board/on-board`` node
    carries the ``connector`` leaf, then return ``callback(element)``.

    Kwargs: interface_type, interface_name, connector (required str
    values); callback (optional, defaults to ``self._callback``).
    Raises ``KeyError`` when a required kwarg is missing.
    """
    get_media_detail = ET.Element("get_media_detail")
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    # The YANG model nests an "on-board" container inside the "on-board" choice node.
    on_board_choice = ET.SubElement(interface_identifier, "on-board")
    on_board = ET.SubElement(on_board_choice, "on-board")
    connector = ET.SubElement(on_board, "connector")
    connector.text = kwargs.pop('connector')
    callback = kwargs.pop('callback', self._callback)
    return callback(get_media_detail)
def get_media_detail_output_interface_interface_identifier_on_board_on_board_encoding(self, **kwargs):
    """Auto Generated Code

    Build a ``get_media_detail`` element whose
    ``output/interface/interface-identifier/on-board/on-board`` node
    carries the ``encoding`` leaf, then return ``callback(element)``.

    Kwargs: interface_type, interface_name, encoding (required str
    values); callback (optional, defaults to ``self._callback``).
    Raises ``KeyError`` when a required kwarg is missing.
    """
    get_media_detail = ET.Element("get_media_detail")
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    # The YANG model nests an "on-board" container inside the "on-board" choice node.
    on_board_choice = ET.SubElement(interface_identifier, "on-board")
    on_board = ET.SubElement(on_board_choice, "on-board")
    encoding = ET.SubElement(on_board, "encoding")
    encoding.text = kwargs.pop('encoding')
    callback = kwargs.pop('callback', self._callback)
    return callback(get_media_detail)
def get_media_detail_output_interface_interface_identifier_on_board_on_board_vendor_name(self, **kwargs):
    """Build a get-media-detail output filter for on-board/on-board/vendor-name.

    Keyword Args: interface_type, interface_name (interface list keys),
    vendor_name (leaf text), callback (defaults to ``self._callback``).
    Returns: the callback's return value.
    """
    # The RPC element itself is the root handed to the callback; the
    # generated original also built an unused ET.Element("config") first.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    on_board = ET.SubElement(interface_identifier, "on-board")
    on_board = ET.SubElement(on_board, "on-board")
    vendor_name = ET.SubElement(on_board, "vendor-name")
    vendor_name.text = kwargs.pop('vendor_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_on_board_on_board_vendor_oui(self, **kwargs):
    """Build a get-media-detail output filter for on-board/on-board/vendor-oui.

    Keyword Args: interface_type, interface_name (interface list keys),
    vendor_oui (leaf text), callback (defaults to ``self._callback``).
    Returns: the callback's return value.
    """
    # The RPC element itself is the root handed to the callback; the
    # generated original also built an unused ET.Element("config") first.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    on_board = ET.SubElement(interface_identifier, "on-board")
    on_board = ET.SubElement(on_board, "on-board")
    vendor_oui = ET.SubElement(on_board, "vendor-oui")
    vendor_oui.text = kwargs.pop('vendor_oui')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_on_board_on_board_vendor_pn(self, **kwargs):
    """Build a get-media-detail output filter for on-board/on-board/vendor-pn.

    Keyword Args: interface_type, interface_name (interface list keys),
    vendor_pn (leaf text), callback (defaults to ``self._callback``).
    Returns: the callback's return value.
    """
    # The RPC element itself is the root handed to the callback; the
    # generated original also built an unused ET.Element("config") first.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    on_board = ET.SubElement(interface_identifier, "on-board")
    on_board = ET.SubElement(on_board, "on-board")
    vendor_pn = ET.SubElement(on_board, "vendor-pn")
    vendor_pn.text = kwargs.pop('vendor_pn')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_on_board_on_board_vendor_rev(self, **kwargs):
    """Build a get-media-detail output filter for on-board/on-board/vendor-rev.

    Keyword Args: interface_type, interface_name (interface list keys),
    vendor_rev (leaf text), callback (defaults to ``self._callback``).
    Returns: the callback's return value.
    """
    # The RPC element itself is the root handed to the callback; the
    # generated original also built an unused ET.Element("config") first.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    on_board = ET.SubElement(interface_identifier, "on-board")
    on_board = ET.SubElement(on_board, "on-board")
    vendor_rev = ET.SubElement(on_board, "vendor-rev")
    vendor_rev.text = kwargs.pop('vendor_rev')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_gbic_gbc_vendor_name(self, **kwargs):
    """Build a get-media-detail output filter for gbic/gbc/vendor-name.

    Keyword Args: interface_type, interface_name (interface list keys),
    vendor_name (leaf text), callback (defaults to ``self._callback``).
    Returns: the callback's return value.
    """
    # The RPC element itself is the root handed to the callback; the
    # generated original also built an unused ET.Element("config") first.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    gbic = ET.SubElement(interface_identifier, "gbic")
    gbc = ET.SubElement(gbic, "gbc")
    vendor_name = ET.SubElement(gbc, "vendor-name")
    vendor_name.text = kwargs.pop('vendor_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_gbic_gbc_vendor_oui(self, **kwargs):
    """Build a get-media-detail output filter for gbic/gbc/vendor-oui.

    Keyword Args: interface_type, interface_name (interface list keys),
    vendor_oui (leaf text), callback (defaults to ``self._callback``).
    Returns: the callback's return value.
    """
    # The RPC element itself is the root handed to the callback; the
    # generated original also built an unused ET.Element("config") first.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    gbic = ET.SubElement(interface_identifier, "gbic")
    gbc = ET.SubElement(gbic, "gbc")
    vendor_oui = ET.SubElement(gbc, "vendor-oui")
    vendor_oui.text = kwargs.pop('vendor_oui')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_gbic_gbc_vendor_pn(self, **kwargs):
    """Build a get-media-detail output filter for gbic/gbc/vendor-pn.

    Keyword Args: interface_type, interface_name (interface list keys),
    vendor_pn (leaf text), callback (defaults to ``self._callback``).
    Returns: the callback's return value.
    """
    # The RPC element itself is the root handed to the callback; the
    # generated original also built an unused ET.Element("config") first.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    gbic = ET.SubElement(interface_identifier, "gbic")
    gbc = ET.SubElement(gbic, "gbc")
    vendor_pn = ET.SubElement(gbc, "vendor-pn")
    vendor_pn.text = kwargs.pop('vendor_pn')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_gbic_gbc_vendor_rev(self, **kwargs):
    """Build a get-media-detail output filter for gbic/gbc/vendor-rev.

    Keyword Args: interface_type, interface_name (interface list keys),
    vendor_rev (leaf text), callback (defaults to ``self._callback``).
    Returns: the callback's return value.
    """
    # The RPC element itself is the root handed to the callback; the
    # generated original also built an unused ET.Element("config") first.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    gbic = ET.SubElement(interface_identifier, "gbic")
    gbc = ET.SubElement(gbic, "gbc")
    vendor_rev = ET.SubElement(gbc, "vendor-rev")
    vendor_rev.text = kwargs.pop('vendor_rev')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xfp_xfp_vendor_name(self, **kwargs):
    """Build a get-media-detail output filter for xfp/xfp/vendor-name.

    Keyword Args: interface_type, interface_name (interface list keys),
    vendor_name (leaf text), callback (defaults to ``self._callback``).
    Returns: the callback's return value.
    """
    # The RPC element itself is the root handed to the callback; the
    # generated original also built an unused ET.Element("config") first.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    xfp = ET.SubElement(interface_identifier, "xfp")
    xfp = ET.SubElement(xfp, "xfp")
    vendor_name = ET.SubElement(xfp, "vendor-name")
    vendor_name.text = kwargs.pop('vendor_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xfp_xfp_vendor_oui(self, **kwargs):
    """Build a get-media-detail output filter for xfp/xfp/vendor-oui.

    Keyword Args: interface_type, interface_name (interface list keys),
    vendor_oui (leaf text), callback (defaults to ``self._callback``).
    Returns: the callback's return value.
    """
    # The RPC element itself is the root handed to the callback; the
    # generated original also built an unused ET.Element("config") first.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    xfp = ET.SubElement(interface_identifier, "xfp")
    xfp = ET.SubElement(xfp, "xfp")
    vendor_oui = ET.SubElement(xfp, "vendor-oui")
    vendor_oui.text = kwargs.pop('vendor_oui')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xfp_xfp_vendor_pn(self, **kwargs):
    """Build a get-media-detail output filter for xfp/xfp/vendor-pn.

    Keyword Args: interface_type, interface_name (interface list keys),
    vendor_pn (leaf text), callback (defaults to ``self._callback``).
    Returns: the callback's return value.
    """
    # The RPC element itself is the root handed to the callback; the
    # generated original also built an unused ET.Element("config") first.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    xfp = ET.SubElement(interface_identifier, "xfp")
    xfp = ET.SubElement(xfp, "xfp")
    vendor_pn = ET.SubElement(xfp, "vendor-pn")
    vendor_pn.text = kwargs.pop('vendor_pn')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xfp_xfp_vendor_rev(self, **kwargs):
    """Build a get-media-detail output filter for xfp/xfp/vendor-rev.

    Keyword Args: interface_type, interface_name (interface list keys),
    vendor_rev (leaf text), callback (defaults to ``self._callback``).
    Returns: the callback's return value.
    """
    # The RPC element itself is the root handed to the callback; the
    # generated original also built an unused ET.Element("config") first.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    xfp = ET.SubElement(interface_identifier, "xfp")
    xfp = ET.SubElement(xfp, "xfp")
    vendor_rev = ET.SubElement(xfp, "vendor-rev")
    vendor_rev.text = kwargs.pop('vendor_rev')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xff_xff_vendor_name(self, **kwargs):
    """Build a get-media-detail output filter for xff/xff/vendor-name.

    Keyword Args: interface_type, interface_name (interface list keys),
    vendor_name (leaf text), callback (defaults to ``self._callback``).
    Returns: the callback's return value.
    """
    # The RPC element itself is the root handed to the callback; the
    # generated original also built an unused ET.Element("config") first.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    xff = ET.SubElement(interface_identifier, "xff")
    xff = ET.SubElement(xff, "xff")
    vendor_name = ET.SubElement(xff, "vendor-name")
    vendor_name.text = kwargs.pop('vendor_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xff_xff_vendor_oui(self, **kwargs):
    """Build a get-media-detail output filter for xff/xff/vendor-oui.

    Keyword Args: interface_type, interface_name (interface list keys),
    vendor_oui (leaf text), callback (defaults to ``self._callback``).
    Returns: the callback's return value.
    """
    # The RPC element itself is the root handed to the callback; the
    # generated original also built an unused ET.Element("config") first.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    xff = ET.SubElement(interface_identifier, "xff")
    xff = ET.SubElement(xff, "xff")
    vendor_oui = ET.SubElement(xff, "vendor-oui")
    vendor_oui.text = kwargs.pop('vendor_oui')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xff_xff_vendor_pn(self, **kwargs):
    """Build a get-media-detail output filter for xff/xff/vendor-pn.

    Keyword Args: interface_type, interface_name (interface list keys),
    vendor_pn (leaf text), callback (defaults to ``self._callback``).
    Returns: the callback's return value.
    """
    # The RPC element itself is the root handed to the callback; the
    # generated original also built an unused ET.Element("config") first.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    xff = ET.SubElement(interface_identifier, "xff")
    xff = ET.SubElement(xff, "xff")
    vendor_pn = ET.SubElement(xff, "vendor-pn")
    vendor_pn.text = kwargs.pop('vendor_pn')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xff_xff_vendor_rev(self, **kwargs):
    """Build a get-media-detail output filter for xff/xff/vendor-rev.

    Keyword Args: interface_type, interface_name (interface list keys),
    vendor_rev (leaf text), callback (defaults to ``self._callback``).
    Returns: the callback's return value.
    """
    # The RPC element itself is the root handed to the callback; the
    # generated original also built an unused ET.Element("config") first.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    xff = ET.SubElement(interface_identifier, "xff")
    xff = ET.SubElement(xff, "xff")
    vendor_rev = ET.SubElement(xff, "vendor-rev")
    vendor_rev.text = kwargs.pop('vendor_rev')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xfpe_xfpe_vendor_name(self, **kwargs):
    """Build a get-media-detail output filter for xfpe/xfpe/vendor-name.

    Keyword Args: interface_type, interface_name (interface list keys),
    vendor_name (leaf text), callback (defaults to ``self._callback``).
    Returns: the callback's return value.
    """
    # The RPC element itself is the root handed to the callback; the
    # generated original also built an unused ET.Element("config") first.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    xfpe = ET.SubElement(interface_identifier, "xfpe")
    xfpe = ET.SubElement(xfpe, "xfpe")
    vendor_name = ET.SubElement(xfpe, "vendor-name")
    vendor_name.text = kwargs.pop('vendor_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xfpe_xfpe_vendor_oui(self, **kwargs):
    """Build a get-media-detail output filter for xfpe/xfpe/vendor-oui.

    Keyword Args: interface_type, interface_name (interface list keys),
    vendor_oui (leaf text), callback (defaults to ``self._callback``).
    Returns: the callback's return value.
    """
    # The RPC element itself is the root handed to the callback; the
    # generated original also built an unused ET.Element("config") first.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    xfpe = ET.SubElement(interface_identifier, "xfpe")
    xfpe = ET.SubElement(xfpe, "xfpe")
    vendor_oui = ET.SubElement(xfpe, "vendor-oui")
    vendor_oui.text = kwargs.pop('vendor_oui')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xfpe_xfpe_vendor_pn(self, **kwargs):
    """Build a get-media-detail output filter for xfpe/xfpe/vendor-pn.

    Keyword Args: interface_type, interface_name (interface list keys),
    vendor_pn (leaf text), callback (defaults to ``self._callback``).
    Returns: the callback's return value.
    """
    # The RPC element itself is the root handed to the callback; the
    # generated original also built an unused ET.Element("config") first.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    xfpe = ET.SubElement(interface_identifier, "xfpe")
    xfpe = ET.SubElement(xfpe, "xfpe")
    vendor_pn = ET.SubElement(xfpe, "vendor-pn")
    vendor_pn.text = kwargs.pop('vendor_pn')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xfpe_xfpe_vendor_rev(self, **kwargs):
    """Build a get-media-detail output filter for xfpe/xfpe/vendor-rev.

    Keyword Args: interface_type, interface_name (interface list keys),
    vendor_rev (leaf text), callback (defaults to ``self._callback``).
    Returns: the callback's return value.
    """
    # The RPC element itself is the root handed to the callback; the
    # generated original also built an unused ET.Element("config") first.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    xfpe = ET.SubElement(interface_identifier, "xfpe")
    xfpe = ET.SubElement(xfpe, "xfpe")
    vendor_rev = ET.SubElement(xfpe, "vendor-rev")
    vendor_rev.text = kwargs.pop('vendor_rev')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_unknown_unknown_vendor_name(self, **kwargs):
    """Build a get-media-detail output filter for unknown/unknown/vendor-name.

    Keyword Args: interface_type, interface_name (interface list keys),
    vendor_name (leaf text), callback (defaults to ``self._callback``).
    Returns: the callback's return value.
    """
    # The RPC element itself is the root handed to the callback; the
    # generated original also built an unused ET.Element("config") first.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    unknown = ET.SubElement(interface_identifier, "unknown")
    unknown = ET.SubElement(unknown, "unknown")
    vendor_name = ET.SubElement(unknown, "vendor-name")
    vendor_name.text = kwargs.pop('vendor_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_unknown_unknown_vendor_oui(self, **kwargs):
    """Build a get-media-detail output filter for unknown/unknown/vendor-oui.

    Keyword Args: interface_type, interface_name (interface list keys),
    vendor_oui (leaf text), callback (defaults to ``self._callback``).
    Returns: the callback's return value.
    """
    # The RPC element itself is the root handed to the callback; the
    # generated original also built an unused ET.Element("config") first.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    unknown = ET.SubElement(interface_identifier, "unknown")
    unknown = ET.SubElement(unknown, "unknown")
    vendor_oui = ET.SubElement(unknown, "vendor-oui")
    vendor_oui.text = kwargs.pop('vendor_oui')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_unknown_unknown_vendor_pn(self, **kwargs):
    """Build a get-media-detail output filter for unknown/unknown/vendor-pn.

    Keyword Args: interface_type, interface_name (interface list keys),
    vendor_pn (leaf text), callback (defaults to ``self._callback``).
    Returns: the callback's return value.
    """
    # The RPC element itself is the root handed to the callback; the
    # generated original also built an unused ET.Element("config") first.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    unknown = ET.SubElement(interface_identifier, "unknown")
    unknown = ET.SubElement(unknown, "unknown")
    vendor_pn = ET.SubElement(unknown, "vendor-pn")
    vendor_pn.text = kwargs.pop('vendor_pn')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_unknown_unknown_vendor_rev(self, **kwargs):
    """Build a get-media-detail output filter for unknown/unknown/vendor-rev.

    Keyword Args: interface_type, interface_name (interface list keys),
    vendor_rev (leaf text), callback (defaults to ``self._callback``).
    Returns: the callback's return value.
    """
    # The RPC element itself is the root handed to the callback; the
    # generated original also built an unused ET.Element("config") first.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    unknown = ET.SubElement(interface_identifier, "unknown")
    unknown = ET.SubElement(unknown, "unknown")
    vendor_rev = ET.SubElement(unknown, "vendor-rev")
    vendor_rev.text = kwargs.pop('vendor_rev')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_speed(self, **kwargs):
    """Build a get-media-detail output filter for qsfp/qsfp/speed.

    Keyword Args: interface_type, interface_name (interface list keys),
    speed (leaf text), callback (defaults to ``self._callback``).
    Returns: the callback's return value.
    """
    # The RPC element itself is the root handed to the callback; the
    # generated original also built an unused ET.Element("config") first.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")
    speed = ET.SubElement(qsfp, "speed")
    speed.text = kwargs.pop('speed')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_connector(self, **kwargs):
    """Build a get-media-detail output filter for qsfp/qsfp/connector.

    Keyword Args: interface_type, interface_name (interface list keys),
    connector (leaf text), callback (defaults to ``self._callback``).
    Returns: the callback's return value.
    """
    # The RPC element itself is the root handed to the callback; the
    # generated original also built an unused ET.Element("config") first.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")
    connector = ET.SubElement(qsfp, "connector")
    connector.text = kwargs.pop('connector')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_encoding(self, **kwargs):
    """Build a get-media-detail output filter for qsfp/qsfp/encoding.

    Keyword Args: interface_type, interface_name (interface list keys),
    encoding (leaf text), callback (defaults to ``self._callback``).
    Returns: the callback's return value.
    """
    # The RPC element itself is the root handed to the callback; the
    # generated original also built an unused ET.Element("config") first.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")
    encoding = ET.SubElement(qsfp, "encoding")
    encoding.text = kwargs.pop('encoding')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_vendor_name(self, **kwargs):
    """Build a get-media-detail output filter for qsfp/qsfp/vendor-name.

    Keyword Args: interface_type, interface_name (interface list keys),
    vendor_name (leaf text), callback (defaults to ``self._callback``).
    Returns: the callback's return value.
    """
    # The RPC element itself is the root handed to the callback; the
    # generated original also built an unused ET.Element("config") first.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")
    vendor_name = ET.SubElement(qsfp, "vendor-name")
    vendor_name.text = kwargs.pop('vendor_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_vendor_oui(self, **kwargs):
    """Build a get-media-detail output filter for qsfp/qsfp/vendor-oui.

    Keyword Args: interface_type, interface_name (interface list keys),
    vendor_oui (leaf text), callback (defaults to ``self._callback``).
    Returns: the callback's return value.
    """
    # The RPC element itself is the root handed to the callback; the
    # generated original also built an unused ET.Element("config") first.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")
    vendor_oui = ET.SubElement(qsfp, "vendor-oui")
    vendor_oui.text = kwargs.pop('vendor_oui')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_vendor_pn(self, **kwargs):
    """Build a get-media-detail output filter for qsfp/qsfp/vendor-pn.

    Keyword Args: interface_type, interface_name (interface list keys),
    vendor_pn (leaf text), callback (defaults to ``self._callback``).
    Returns: the callback's return value.
    """
    # The RPC element itself is the root handed to the callback; the
    # generated original also built an unused ET.Element("config") first.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")
    vendor_pn = ET.SubElement(qsfp, "vendor-pn")
    vendor_pn.text = kwargs.pop('vendor_pn')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_vendor_rev(self, **kwargs):
    """Build a get-media-detail output filter for qsfp/qsfp/vendor-rev.

    Keyword Args: interface_type, interface_name (interface list keys),
    vendor_rev (leaf text), callback (defaults to ``self._callback``).
    Returns: the callback's return value.
    """
    # The RPC element itself is the root handed to the callback; the
    # generated original also built an unused ET.Element("config") first.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")
    vendor_rev = ET.SubElement(qsfp, "vendor-rev")
    vendor_rev.text = kwargs.pop('vendor_rev')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_distance(self, **kwargs):
    """Build a get-media-detail output filter for qsfp/qsfp/distance.

    Keyword Args: interface_type, interface_name (interface list keys),
    distance (leaf text), callback (defaults to ``self._callback``).
    Returns: the callback's return value.
    """
    # The RPC element itself is the root handed to the callback; the
    # generated original also built an unused ET.Element("config") first.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")
    distance = ET.SubElement(qsfp, "distance")
    distance.text = kwargs.pop('distance')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_media_form_factor(self, **kwargs):
    """Build a get-media-detail output filter for qsfp/qsfp/media-form-factor.

    Keyword Args: interface_type, interface_name (interface list keys),
    media_form_factor (leaf text), callback (defaults to ``self._callback``).
    Returns: the callback's return value.
    """
    # The RPC element itself is the root handed to the callback; the
    # generated original also built an unused ET.Element("config") first.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")
    media_form_factor = ET.SubElement(qsfp, "media-form-factor")
    media_form_factor.text = kwargs.pop('media_form_factor')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_wavelength(self, **kwargs):
    """Build a get-media-detail output filter for qsfp/qsfp/wavelength.

    Keyword Args: interface_type, interface_name (interface list keys),
    wavelength (leaf text), callback (defaults to ``self._callback``).
    Returns: the callback's return value.
    """
    # The RPC element itself is the root handed to the callback; the
    # generated original also built an unused ET.Element("config") first.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")
    wavelength = ET.SubElement(qsfp, "wavelength")
    wavelength.text = kwargs.pop('wavelength')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_serial_no(self, **kwargs):
    """Build a get-media-detail output filter for qsfp/qsfp/serial-no.

    Keyword Args: interface_type, interface_name (interface list keys),
    serial_no (leaf text), callback (defaults to ``self._callback``).
    Returns: the callback's return value.
    """
    # The RPC element itself is the root handed to the callback; the
    # generated original also built an unused ET.Element("config") first.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")
    serial_no = ET.SubElement(qsfp, "serial-no")
    serial_no.text = kwargs.pop('serial_no')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_date_code(self, **kwargs):
    """Build a get-media-detail output filter for qsfp/qsfp/date-code.

    Keyword Args: interface_type, interface_name (interface list keys),
    date_code (leaf text), callback (defaults to ``self._callback``).
    Returns: the callback's return value.
    """
    # The RPC element itself is the root handed to the callback; the
    # generated original also built an unused ET.Element("config") first.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")
    date_code = ET.SubElement(qsfp, "date-code")
    date_code.text = kwargs.pop('date_code')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_temperature(self, **kwargs):
    """Build a <get_media_detail> element down to interface-identifier/qsfp/qsfp/temperature
    and pass it to the callback.

    Keyword Args:
        interface_type (str): text for the <interface-type> list key.
        interface_name (str): text for the <interface-name> list key.
        temperature (str): text for the <temperature> leaf.
        callback: callable applied to the element; defaults to self._callback.

    Returns:
        The callback's return value.
    """
    # Dead store ET.Element("config") from the generator removed.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")
    temperature = ET.SubElement(qsfp, "temperature")
    temperature.text = kwargs.pop('temperature')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_voltage(self, **kwargs):
    """Build a <get_media_detail> element down to interface-identifier/qsfp/qsfp/voltage
    and pass it to the callback.

    Keyword Args:
        interface_type (str): text for the <interface-type> list key.
        interface_name (str): text for the <interface-name> list key.
        voltage (str): text for the <voltage> leaf.
        callback: callable applied to the element; defaults to self._callback.

    Returns:
        The callback's return value.
    """
    # Dead store ET.Element("config") from the generator removed.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")
    voltage = ET.SubElement(qsfp, "voltage")
    voltage.text = kwargs.pop('voltage')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_current(self, **kwargs):
    """Build a <get_media_detail> element down to interface-identifier/qsfp/qsfp/current
    and pass it to the callback.

    Keyword Args:
        interface_type (str): text for the <interface-type> list key.
        interface_name (str): text for the <interface-name> list key.
        current (str): text for the <current> leaf.
        callback: callable applied to the element; defaults to self._callback.

    Returns:
        The callback's return value.
    """
    # Dead store ET.Element("config") from the generator removed.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")
    current = ET.SubElement(qsfp, "current")
    current.text = kwargs.pop('current')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_tx_power(self, **kwargs):
    """Build a <get_media_detail> element down to interface-identifier/qsfp/qsfp/tx-power
    and pass it to the callback.

    Keyword Args:
        interface_type (str): text for the <interface-type> list key.
        interface_name (str): text for the <interface-name> list key.
        tx_power (str): text for the <tx-power> leaf.
        callback: callable applied to the element; defaults to self._callback.

    Returns:
        The callback's return value.
    """
    # Dead store ET.Element("config") from the generator removed.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")
    tx_power = ET.SubElement(qsfp, "tx-power")
    tx_power.text = kwargs.pop('tx_power')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_rx_power(self, **kwargs):
    """Build a <get_media_detail> element down to interface-identifier/qsfp/qsfp/rx-power
    and pass it to the callback.

    Keyword Args:
        interface_type (str): text for the <interface-type> list key.
        interface_name (str): text for the <interface-name> list key.
        rx_power (str): text for the <rx-power> leaf.
        callback: callable applied to the element; defaults to self._callback.

    Returns:
        The callback's return value.
    """
    # Dead store ET.Element("config") from the generator removed.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")
    rx_power = ET.SubElement(qsfp, "rx-power")
    rx_power.text = kwargs.pop('rx_power')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_speed(self, **kwargs):
    """Build a <get_media_detail> element down to interface-identifier/qsfpp/qsfpp/speed
    and pass it to the callback.

    Keyword Args:
        interface_type (str): text for the <interface-type> list key.
        interface_name (str): text for the <interface-name> list key.
        speed (str): text for the <speed> leaf.
        callback: callable applied to the element; defaults to self._callback.

    Returns:
        The callback's return value.
    """
    # Dead store ET.Element("config") from the generator removed.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfpp = ET.SubElement(interface_identifier, "qsfpp")
    qsfpp = ET.SubElement(qsfpp, "qsfpp")
    speed = ET.SubElement(qsfpp, "speed")
    speed.text = kwargs.pop('speed')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_connector(self, **kwargs):
    """Build a <get_media_detail> element down to interface-identifier/qsfpp/qsfpp/connector
    and pass it to the callback.

    Keyword Args:
        interface_type (str): text for the <interface-type> list key.
        interface_name (str): text for the <interface-name> list key.
        connector (str): text for the <connector> leaf.
        callback: callable applied to the element; defaults to self._callback.

    Returns:
        The callback's return value.
    """
    # Dead store ET.Element("config") from the generator removed.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfpp = ET.SubElement(interface_identifier, "qsfpp")
    qsfpp = ET.SubElement(qsfpp, "qsfpp")
    connector = ET.SubElement(qsfpp, "connector")
    connector.text = kwargs.pop('connector')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_encoding(self, **kwargs):
    """Build a <get_media_detail> element down to interface-identifier/qsfpp/qsfpp/encoding
    and pass it to the callback.

    Keyword Args:
        interface_type (str): text for the <interface-type> list key.
        interface_name (str): text for the <interface-name> list key.
        encoding (str): text for the <encoding> leaf.
        callback: callable applied to the element; defaults to self._callback.

    Returns:
        The callback's return value.
    """
    # Dead store ET.Element("config") from the generator removed.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfpp = ET.SubElement(interface_identifier, "qsfpp")
    qsfpp = ET.SubElement(qsfpp, "qsfpp")
    encoding = ET.SubElement(qsfpp, "encoding")
    encoding.text = kwargs.pop('encoding')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_vendor_name(self, **kwargs):
    """Build a <get_media_detail> element down to interface-identifier/qsfpp/qsfpp/vendor-name
    and pass it to the callback.

    Keyword Args:
        interface_type (str): text for the <interface-type> list key.
        interface_name (str): text for the <interface-name> list key.
        vendor_name (str): text for the <vendor-name> leaf.
        callback: callable applied to the element; defaults to self._callback.

    Returns:
        The callback's return value.
    """
    # Dead store ET.Element("config") from the generator removed.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfpp = ET.SubElement(interface_identifier, "qsfpp")
    qsfpp = ET.SubElement(qsfpp, "qsfpp")
    vendor_name = ET.SubElement(qsfpp, "vendor-name")
    vendor_name.text = kwargs.pop('vendor_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_vendor_oui(self, **kwargs):
    """Build a <get_media_detail> element down to interface-identifier/qsfpp/qsfpp/vendor-oui
    and pass it to the callback.

    Keyword Args:
        interface_type (str): text for the <interface-type> list key.
        interface_name (str): text for the <interface-name> list key.
        vendor_oui (str): text for the <vendor-oui> leaf.
        callback: callable applied to the element; defaults to self._callback.

    Returns:
        The callback's return value.
    """
    # Dead store ET.Element("config") from the generator removed.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfpp = ET.SubElement(interface_identifier, "qsfpp")
    qsfpp = ET.SubElement(qsfpp, "qsfpp")
    vendor_oui = ET.SubElement(qsfpp, "vendor-oui")
    vendor_oui.text = kwargs.pop('vendor_oui')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_vendor_pn(self, **kwargs):
    """Build a <get_media_detail> element down to interface-identifier/qsfpp/qsfpp/vendor-pn
    and pass it to the callback.

    Keyword Args:
        interface_type (str): text for the <interface-type> list key.
        interface_name (str): text for the <interface-name> list key.
        vendor_pn (str): text for the <vendor-pn> leaf.
        callback: callable applied to the element; defaults to self._callback.

    Returns:
        The callback's return value.
    """
    # Dead store ET.Element("config") from the generator removed.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfpp = ET.SubElement(interface_identifier, "qsfpp")
    qsfpp = ET.SubElement(qsfpp, "qsfpp")
    vendor_pn = ET.SubElement(qsfpp, "vendor-pn")
    vendor_pn.text = kwargs.pop('vendor_pn')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_vendor_rev(self, **kwargs):
    """Build a <get_media_detail> element down to interface-identifier/qsfpp/qsfpp/vendor-rev
    and pass it to the callback.

    Keyword Args:
        interface_type (str): text for the <interface-type> list key.
        interface_name (str): text for the <interface-name> list key.
        vendor_rev (str): text for the <vendor-rev> leaf.
        callback: callable applied to the element; defaults to self._callback.

    Returns:
        The callback's return value.
    """
    # Dead store ET.Element("config") from the generator removed.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfpp = ET.SubElement(interface_identifier, "qsfpp")
    qsfpp = ET.SubElement(qsfpp, "qsfpp")
    vendor_rev = ET.SubElement(qsfpp, "vendor-rev")
    vendor_rev.text = kwargs.pop('vendor_rev')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_distance(self, **kwargs):
    """Build a <get_media_detail> element down to interface-identifier/qsfpp/qsfpp/distance
    and pass it to the callback.

    Keyword Args:
        interface_type (str): text for the <interface-type> list key.
        interface_name (str): text for the <interface-name> list key.
        distance (str): text for the <distance> leaf.
        callback: callable applied to the element; defaults to self._callback.

    Returns:
        The callback's return value.
    """
    # Dead store ET.Element("config") from the generator removed.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfpp = ET.SubElement(interface_identifier, "qsfpp")
    qsfpp = ET.SubElement(qsfpp, "qsfpp")
    distance = ET.SubElement(qsfpp, "distance")
    distance.text = kwargs.pop('distance')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_media_form_factor(self, **kwargs):
    """Build a <get_media_detail> element down to interface-identifier/qsfpp/qsfpp/media-form-factor
    and pass it to the callback.

    Keyword Args:
        interface_type (str): text for the <interface-type> list key.
        interface_name (str): text for the <interface-name> list key.
        media_form_factor (str): text for the <media-form-factor> leaf.
        callback: callable applied to the element; defaults to self._callback.

    Returns:
        The callback's return value.
    """
    # Dead store ET.Element("config") from the generator removed.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfpp = ET.SubElement(interface_identifier, "qsfpp")
    qsfpp = ET.SubElement(qsfpp, "qsfpp")
    media_form_factor = ET.SubElement(qsfpp, "media-form-factor")
    media_form_factor.text = kwargs.pop('media_form_factor')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_wavelength(self, **kwargs):
    """Build a <get_media_detail> element down to interface-identifier/qsfpp/qsfpp/wavelength
    and pass it to the callback.

    Keyword Args:
        interface_type (str): text for the <interface-type> list key.
        interface_name (str): text for the <interface-name> list key.
        wavelength (str): text for the <wavelength> leaf.
        callback: callable applied to the element; defaults to self._callback.

    Returns:
        The callback's return value.
    """
    # Dead store ET.Element("config") from the generator removed.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfpp = ET.SubElement(interface_identifier, "qsfpp")
    qsfpp = ET.SubElement(qsfpp, "qsfpp")
    wavelength = ET.SubElement(qsfpp, "wavelength")
    wavelength.text = kwargs.pop('wavelength')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_serial_no(self, **kwargs):
    """Build a <get_media_detail> element down to interface-identifier/qsfpp/qsfpp/serial-no
    and pass it to the callback.

    Keyword Args:
        interface_type (str): text for the <interface-type> list key.
        interface_name (str): text for the <interface-name> list key.
        serial_no (str): text for the <serial-no> leaf.
        callback: callable applied to the element; defaults to self._callback.

    Returns:
        The callback's return value.
    """
    # Dead store ET.Element("config") from the generator removed.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfpp = ET.SubElement(interface_identifier, "qsfpp")
    qsfpp = ET.SubElement(qsfpp, "qsfpp")
    serial_no = ET.SubElement(qsfpp, "serial-no")
    serial_no.text = kwargs.pop('serial_no')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_date_code(self, **kwargs):
    """Build a <get_media_detail> element down to interface-identifier/qsfpp/qsfpp/date-code
    and pass it to the callback.

    Keyword Args:
        interface_type (str): text for the <interface-type> list key.
        interface_name (str): text for the <interface-name> list key.
        date_code (str): text for the <date-code> leaf.
        callback: callable applied to the element; defaults to self._callback.

    Returns:
        The callback's return value.
    """
    # Dead store ET.Element("config") from the generator removed.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfpp = ET.SubElement(interface_identifier, "qsfpp")
    qsfpp = ET.SubElement(qsfpp, "qsfpp")
    date_code = ET.SubElement(qsfpp, "date-code")
    date_code.text = kwargs.pop('date_code')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_temperature(self, **kwargs):
    """Build a <get_media_detail> element down to interface-identifier/qsfpp/qsfpp/temperature
    and pass it to the callback.

    Keyword Args:
        interface_type (str): text for the <interface-type> list key.
        interface_name (str): text for the <interface-name> list key.
        temperature (str): text for the <temperature> leaf.
        callback: callable applied to the element; defaults to self._callback.

    Returns:
        The callback's return value.
    """
    # Dead store ET.Element("config") from the generator removed.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfpp = ET.SubElement(interface_identifier, "qsfpp")
    qsfpp = ET.SubElement(qsfpp, "qsfpp")
    temperature = ET.SubElement(qsfpp, "temperature")
    temperature.text = kwargs.pop('temperature')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_voltage(self, **kwargs):
    """Build a <get_media_detail> element down to interface-identifier/qsfpp/qsfpp/voltage
    and pass it to the callback.

    Keyword Args:
        interface_type (str): text for the <interface-type> list key.
        interface_name (str): text for the <interface-name> list key.
        voltage (str): text for the <voltage> leaf.
        callback: callable applied to the element; defaults to self._callback.

    Returns:
        The callback's return value.
    """
    # Dead store ET.Element("config") from the generator removed.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfpp = ET.SubElement(interface_identifier, "qsfpp")
    qsfpp = ET.SubElement(qsfpp, "qsfpp")
    voltage = ET.SubElement(qsfpp, "voltage")
    voltage.text = kwargs.pop('voltage')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_current(self, **kwargs):
    """Build a <get_media_detail> element down to interface-identifier/qsfpp/qsfpp/current
    and pass it to the callback.

    Keyword Args:
        interface_type (str): text for the <interface-type> list key.
        interface_name (str): text for the <interface-name> list key.
        current (str): text for the <current> leaf.
        callback: callable applied to the element; defaults to self._callback.

    Returns:
        The callback's return value.
    """
    # Dead store ET.Element("config") from the generator removed.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfpp = ET.SubElement(interface_identifier, "qsfpp")
    qsfpp = ET.SubElement(qsfpp, "qsfpp")
    current = ET.SubElement(qsfpp, "current")
    current.text = kwargs.pop('current')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_tx_power(self, **kwargs):
    """Build a <get_media_detail> element down to interface-identifier/qsfpp/qsfpp/tx-power
    and pass it to the callback.

    Keyword Args:
        interface_type (str): text for the <interface-type> list key.
        interface_name (str): text for the <interface-name> list key.
        tx_power (str): text for the <tx-power> leaf.
        callback: callable applied to the element; defaults to self._callback.

    Returns:
        The callback's return value.
    """
    # Dead store ET.Element("config") from the generator removed.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfpp = ET.SubElement(interface_identifier, "qsfpp")
    qsfpp = ET.SubElement(qsfpp, "qsfpp")
    tx_power = ET.SubElement(qsfpp, "tx-power")
    tx_power.text = kwargs.pop('tx_power')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_rx_power(self, **kwargs):
    """Build a <get_media_detail> element down to interface-identifier/qsfpp/qsfpp/rx-power
    and pass it to the callback.

    Keyword Args:
        interface_type (str): text for the <interface-type> list key.
        interface_name (str): text for the <interface-name> list key.
        rx_power (str): text for the <rx-power> leaf.
        callback: callable applied to the element; defaults to self._callback.

    Returns:
        The callback's return value.
    """
    # Dead store ET.Element("config") from the generator removed.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfpp = ET.SubElement(interface_identifier, "qsfpp")
    qsfpp = ET.SubElement(qsfpp, "qsfpp")
    rx_power = ET.SubElement(qsfpp, "rx-power")
    rx_power.text = kwargs.pop('rx_power')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_speed(self, **kwargs):
    """Build a <get_media_detail> element down to interface-identifier/cfp/cfp/speed
    and pass it to the callback.

    Keyword Args:
        interface_type (str): text for the <interface-type> list key.
        interface_name (str): text for the <interface-name> list key.
        speed (str): text for the <speed> leaf.
        callback: callable applied to the element; defaults to self._callback.

    Returns:
        The callback's return value.
    """
    # Dead store ET.Element("config") from the generator removed.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp = ET.SubElement(interface_identifier, "cfp")
    cfp = ET.SubElement(cfp, "cfp")
    speed = ET.SubElement(cfp, "speed")
    speed.text = kwargs.pop('speed')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_connector(self, **kwargs):
    """Build a <get_media_detail> element down to interface-identifier/cfp/cfp/connector
    and pass it to the callback.

    Keyword Args:
        interface_type (str): text for the <interface-type> list key.
        interface_name (str): text for the <interface-name> list key.
        connector (str): text for the <connector> leaf.
        callback: callable applied to the element; defaults to self._callback.

    Returns:
        The callback's return value.
    """
    # Dead store ET.Element("config") from the generator removed.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp = ET.SubElement(interface_identifier, "cfp")
    cfp = ET.SubElement(cfp, "cfp")
    connector = ET.SubElement(cfp, "connector")
    connector.text = kwargs.pop('connector')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_encoding(self, **kwargs):
    """Build a <get_media_detail> element down to interface-identifier/cfp/cfp/encoding
    and pass it to the callback.

    Keyword Args:
        interface_type (str): text for the <interface-type> list key.
        interface_name (str): text for the <interface-name> list key.
        encoding (str): text for the <encoding> leaf.
        callback: callable applied to the element; defaults to self._callback.

    Returns:
        The callback's return value.
    """
    # Dead store ET.Element("config") from the generator removed.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp = ET.SubElement(interface_identifier, "cfp")
    cfp = ET.SubElement(cfp, "cfp")
    encoding = ET.SubElement(cfp, "encoding")
    encoding.text = kwargs.pop('encoding')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_vendor_name(self, **kwargs):
    """Build a <get_media_detail> element down to interface-identifier/cfp/cfp/vendor-name
    and pass it to the callback.

    Keyword Args:
        interface_type (str): text for the <interface-type> list key.
        interface_name (str): text for the <interface-name> list key.
        vendor_name (str): text for the <vendor-name> leaf.
        callback: callable applied to the element; defaults to self._callback.

    Returns:
        The callback's return value.
    """
    # Dead store ET.Element("config") from the generator removed.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp = ET.SubElement(interface_identifier, "cfp")
    cfp = ET.SubElement(cfp, "cfp")
    vendor_name = ET.SubElement(cfp, "vendor-name")
    vendor_name.text = kwargs.pop('vendor_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_vendor_oui(self, **kwargs):
    """Build a <get_media_detail> element down to interface-identifier/cfp/cfp/vendor-oui
    and pass it to the callback.

    Keyword Args:
        interface_type (str): text for the <interface-type> list key.
        interface_name (str): text for the <interface-name> list key.
        vendor_oui (str): text for the <vendor-oui> leaf.
        callback: callable applied to the element; defaults to self._callback.

    Returns:
        The callback's return value.
    """
    # Dead store ET.Element("config") from the generator removed.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp = ET.SubElement(interface_identifier, "cfp")
    cfp = ET.SubElement(cfp, "cfp")
    vendor_oui = ET.SubElement(cfp, "vendor-oui")
    vendor_oui.text = kwargs.pop('vendor_oui')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_vendor_pn(self, **kwargs):
    """Build a <get_media_detail> element down to interface-identifier/cfp/cfp/vendor-pn
    and pass it to the callback.

    Keyword Args:
        interface_type (str): text for the <interface-type> list key.
        interface_name (str): text for the <interface-name> list key.
        vendor_pn (str): text for the <vendor-pn> leaf.
        callback: callable applied to the element; defaults to self._callback.

    Returns:
        The callback's return value.
    """
    # Dead store ET.Element("config") from the generator removed.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp = ET.SubElement(interface_identifier, "cfp")
    cfp = ET.SubElement(cfp, "cfp")
    vendor_pn = ET.SubElement(cfp, "vendor-pn")
    vendor_pn.text = kwargs.pop('vendor_pn')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_vendor_rev(self, **kwargs):
    """Build a <get_media_detail> element down to interface-identifier/cfp/cfp/vendor-rev
    and pass it to the callback.

    Keyword Args:
        interface_type (str): text for the <interface-type> list key.
        interface_name (str): text for the <interface-name> list key.
        vendor_rev (str): text for the <vendor-rev> leaf.
        callback: callable applied to the element; defaults to self._callback.

    Returns:
        The callback's return value.
    """
    # Dead store ET.Element("config") from the generator removed.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp = ET.SubElement(interface_identifier, "cfp")
    cfp = ET.SubElement(cfp, "cfp")
    vendor_rev = ET.SubElement(cfp, "vendor-rev")
    vendor_rev.text = kwargs.pop('vendor_rev')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_distance(self, **kwargs):
    """Build a <get_media_detail> element down to interface-identifier/cfp/cfp/distance
    and pass it to the callback.

    Keyword Args:
        interface_type (str): text for the <interface-type> list key.
        interface_name (str): text for the <interface-name> list key.
        distance (str): text for the <distance> leaf.
        callback: callable applied to the element; defaults to self._callback.

    Returns:
        The callback's return value.
    """
    # Dead store ET.Element("config") from the generator removed.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp = ET.SubElement(interface_identifier, "cfp")
    cfp = ET.SubElement(cfp, "cfp")
    distance = ET.SubElement(cfp, "distance")
    distance.text = kwargs.pop('distance')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_media_form_factor(self, **kwargs):
    """Build a <get_media_detail> element down to interface-identifier/cfp/cfp/media-form-factor
    and pass it to the callback.

    Keyword Args:
        interface_type (str): text for the <interface-type> list key.
        interface_name (str): text for the <interface-name> list key.
        media_form_factor (str): text for the <media-form-factor> leaf.
        callback: callable applied to the element; defaults to self._callback.

    Returns:
        The callback's return value.
    """
    # Dead store ET.Element("config") from the generator removed.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp = ET.SubElement(interface_identifier, "cfp")
    cfp = ET.SubElement(cfp, "cfp")
    media_form_factor = ET.SubElement(cfp, "media-form-factor")
    media_form_factor.text = kwargs.pop('media_form_factor')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_wavelength(self, **kwargs):
    """Build a <get_media_detail> element down to interface-identifier/cfp/cfp/wavelength
    and pass it to the callback.

    Keyword Args:
        interface_type (str): text for the <interface-type> list key.
        interface_name (str): text for the <interface-name> list key.
        wavelength (str): text for the <wavelength> leaf.
        callback: callable applied to the element; defaults to self._callback.

    Returns:
        The callback's return value.
    """
    # Dead store ET.Element("config") from the generator removed.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp = ET.SubElement(interface_identifier, "cfp")
    cfp = ET.SubElement(cfp, "cfp")
    wavelength = ET.SubElement(cfp, "wavelength")
    wavelength.text = kwargs.pop('wavelength')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_serial_no(self, **kwargs):
    """Build a <get_media_detail> element down to interface-identifier/cfp/cfp/serial-no
    and pass it to the callback.

    Keyword Args:
        interface_type (str): text for the <interface-type> list key.
        interface_name (str): text for the <interface-name> list key.
        serial_no (str): text for the <serial-no> leaf.
        callback: callable applied to the element; defaults to self._callback.

    Returns:
        The callback's return value.
    """
    # Dead store ET.Element("config") from the generator removed.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp = ET.SubElement(interface_identifier, "cfp")
    cfp = ET.SubElement(cfp, "cfp")
    serial_no = ET.SubElement(cfp, "serial-no")
    serial_no.text = kwargs.pop('serial_no')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_date_code(self, **kwargs):
    """Build a <get_media_detail> element down to interface-identifier/cfp/cfp/date-code
    and pass it to the callback.

    Keyword Args:
        interface_type (str): text for the <interface-type> list key.
        interface_name (str): text for the <interface-name> list key.
        date_code (str): text for the <date-code> leaf.
        callback: callable applied to the element; defaults to self._callback.

    Returns:
        The callback's return value.
    """
    # Dead store ET.Element("config") from the generator removed.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp = ET.SubElement(interface_identifier, "cfp")
    cfp = ET.SubElement(cfp, "cfp")
    date_code = ET.SubElement(cfp, "date-code")
    date_code.text = kwargs.pop('date_code')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_temperature(self, **kwargs):
    """Build a <get_media_detail> element down to interface-identifier/cfp/cfp/temperature
    and pass it to the callback.

    Keyword Args:
        interface_type (str): text for the <interface-type> list key.
        interface_name (str): text for the <interface-name> list key.
        temperature (str): text for the <temperature> leaf.
        callback: callable applied to the element; defaults to self._callback.

    Returns:
        The callback's return value.
    """
    # Dead store ET.Element("config") from the generator removed.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp = ET.SubElement(interface_identifier, "cfp")
    cfp = ET.SubElement(cfp, "cfp")
    temperature = ET.SubElement(cfp, "temperature")
    temperature.text = kwargs.pop('temperature')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_voltage(self, **kwargs):
    """Build a <get_media_detail> element down to interface-identifier/cfp/cfp/voltage
    and pass it to the callback.

    Keyword Args:
        interface_type (str): text for the <interface-type> list key.
        interface_name (str): text for the <interface-name> list key.
        voltage (str): text for the <voltage> leaf.
        callback: callable applied to the element; defaults to self._callback.

    Returns:
        The callback's return value.
    """
    # Dead store ET.Element("config") from the generator removed.
    get_media_detail = ET.Element("get_media_detail")
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp = ET.SubElement(interface_identifier, "cfp")
    cfp = ET.SubElement(cfp, "cfp")
    voltage = ET.SubElement(cfp, "voltage")
    voltage.text = kwargs.pop('voltage')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_current(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request element carrying the
    output/interface/interface-identifier/cfp/cfp/current leaf and pass
    it to the callback.

    Required kwargs: ``interface_type``, ``interface_name``, ``current``.
    Optional kwarg: ``callback`` (defaults to ``self._callback``).
    """
    # Dead ET.Element("config") assignment from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp = ET.SubElement(interface_identifier, "cfp")
    cfp = ET.SubElement(cfp, "cfp")  # nested cfp/cfp container per the schema
    current = ET.SubElement(cfp, "current")
    current.text = kwargs.pop('current')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_tx_power(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request element carrying the
    output/interface/interface-identifier/cfp/cfp/tx-power leaf and pass
    it to the callback.

    Required kwargs: ``interface_type``, ``interface_name``, ``tx_power``.
    Optional kwarg: ``callback`` (defaults to ``self._callback``).
    """
    # Dead ET.Element("config") assignment from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp = ET.SubElement(interface_identifier, "cfp")
    cfp = ET.SubElement(cfp, "cfp")  # nested cfp/cfp container per the schema
    tx_power = ET.SubElement(cfp, "tx-power")
    tx_power.text = kwargs.pop('tx_power')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_rx_power(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request element carrying the
    output/interface/interface-identifier/cfp/cfp/rx-power leaf and pass
    it to the callback.

    Required kwargs: ``interface_type``, ``interface_name``, ``rx_power``.
    Optional kwarg: ``callback`` (defaults to ``self._callback``).
    """
    # Dead ET.Element("config") assignment from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp = ET.SubElement(interface_identifier, "cfp")
    cfp = ET.SubElement(cfp, "cfp")  # nested cfp/cfp container per the schema
    rx_power = ET.SubElement(cfp, "rx-power")
    rx_power.text = kwargs.pop('rx_power')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_speed(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request element carrying the
    output/interface/interface-identifier/cfp2/cfp2/speed leaf and pass
    it to the callback.

    Required kwargs: ``interface_type``, ``interface_name``, ``speed``.
    Optional kwarg: ``callback`` (defaults to ``self._callback``).
    """
    # Dead ET.Element("config") assignment from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")  # nested cfp2/cfp2 container per the schema
    speed = ET.SubElement(cfp2, "speed")
    speed.text = kwargs.pop('speed')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_connector(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request element carrying the
    output/interface/interface-identifier/cfp2/cfp2/connector leaf and pass
    it to the callback.

    Required kwargs: ``interface_type``, ``interface_name``, ``connector``.
    Optional kwarg: ``callback`` (defaults to ``self._callback``).
    """
    # Dead ET.Element("config") assignment from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")  # nested cfp2/cfp2 container per the schema
    connector = ET.SubElement(cfp2, "connector")
    connector.text = kwargs.pop('connector')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_encoding(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request element carrying the
    output/interface/interface-identifier/cfp2/cfp2/encoding leaf and pass
    it to the callback.

    Required kwargs: ``interface_type``, ``interface_name``, ``encoding``.
    Optional kwarg: ``callback`` (defaults to ``self._callback``).
    """
    # Dead ET.Element("config") assignment from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")  # nested cfp2/cfp2 container per the schema
    encoding = ET.SubElement(cfp2, "encoding")
    encoding.text = kwargs.pop('encoding')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_vendor_name(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request element carrying the
    output/interface/interface-identifier/cfp2/cfp2/vendor-name leaf and pass
    it to the callback.

    Required kwargs: ``interface_type``, ``interface_name``, ``vendor_name``.
    Optional kwarg: ``callback`` (defaults to ``self._callback``).
    """
    # Dead ET.Element("config") assignment from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")  # nested cfp2/cfp2 container per the schema
    vendor_name = ET.SubElement(cfp2, "vendor-name")
    vendor_name.text = kwargs.pop('vendor_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_vendor_oui(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request element carrying the
    output/interface/interface-identifier/cfp2/cfp2/vendor-oui leaf and pass
    it to the callback.

    Required kwargs: ``interface_type``, ``interface_name``, ``vendor_oui``.
    Optional kwarg: ``callback`` (defaults to ``self._callback``).
    """
    # Dead ET.Element("config") assignment from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")  # nested cfp2/cfp2 container per the schema
    vendor_oui = ET.SubElement(cfp2, "vendor-oui")
    vendor_oui.text = kwargs.pop('vendor_oui')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_vendor_pn(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request element carrying the
    output/interface/interface-identifier/cfp2/cfp2/vendor-pn leaf and pass
    it to the callback.

    Required kwargs: ``interface_type``, ``interface_name``, ``vendor_pn``.
    Optional kwarg: ``callback`` (defaults to ``self._callback``).
    """
    # Dead ET.Element("config") assignment from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")  # nested cfp2/cfp2 container per the schema
    vendor_pn = ET.SubElement(cfp2, "vendor-pn")
    vendor_pn.text = kwargs.pop('vendor_pn')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_vendor_rev(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request element carrying the
    output/interface/interface-identifier/cfp2/cfp2/vendor-rev leaf and pass
    it to the callback.

    Required kwargs: ``interface_type``, ``interface_name``, ``vendor_rev``.
    Optional kwarg: ``callback`` (defaults to ``self._callback``).
    """
    # Dead ET.Element("config") assignment from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")  # nested cfp2/cfp2 container per the schema
    vendor_rev = ET.SubElement(cfp2, "vendor-rev")
    vendor_rev.text = kwargs.pop('vendor_rev')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_distance(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request element carrying the
    output/interface/interface-identifier/cfp2/cfp2/distance leaf and pass
    it to the callback.

    Required kwargs: ``interface_type``, ``interface_name``, ``distance``.
    Optional kwarg: ``callback`` (defaults to ``self._callback``).
    """
    # Dead ET.Element("config") assignment from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")  # nested cfp2/cfp2 container per the schema
    distance = ET.SubElement(cfp2, "distance")
    distance.text = kwargs.pop('distance')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_media_form_factor(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request element carrying the
    output/interface/interface-identifier/cfp2/cfp2/media-form-factor leaf
    and pass it to the callback.

    Required kwargs: ``interface_type``, ``interface_name``,
    ``media_form_factor``.
    Optional kwarg: ``callback`` (defaults to ``self._callback``).
    """
    # Dead ET.Element("config") assignment from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")  # nested cfp2/cfp2 container per the schema
    media_form_factor = ET.SubElement(cfp2, "media-form-factor")
    media_form_factor.text = kwargs.pop('media_form_factor')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_wavelength(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request element carrying the
    output/interface/interface-identifier/cfp2/cfp2/wavelength leaf and pass
    it to the callback.

    Required kwargs: ``interface_type``, ``interface_name``, ``wavelength``.
    Optional kwarg: ``callback`` (defaults to ``self._callback``).
    """
    # Dead ET.Element("config") assignment from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")  # nested cfp2/cfp2 container per the schema
    wavelength = ET.SubElement(cfp2, "wavelength")
    wavelength.text = kwargs.pop('wavelength')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_serial_no(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request element carrying the
    output/interface/interface-identifier/cfp2/cfp2/serial-no leaf and pass
    it to the callback.

    Required kwargs: ``interface_type``, ``interface_name``, ``serial_no``.
    Optional kwarg: ``callback`` (defaults to ``self._callback``).
    """
    # Dead ET.Element("config") assignment from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")  # nested cfp2/cfp2 container per the schema
    serial_no = ET.SubElement(cfp2, "serial-no")
    serial_no.text = kwargs.pop('serial_no')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_date_code(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request element carrying the
    output/interface/interface-identifier/cfp2/cfp2/date-code leaf and pass
    it to the callback.

    Required kwargs: ``interface_type``, ``interface_name``, ``date_code``.
    Optional kwarg: ``callback`` (defaults to ``self._callback``).
    """
    # Dead ET.Element("config") assignment from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")  # nested cfp2/cfp2 container per the schema
    date_code = ET.SubElement(cfp2, "date-code")
    date_code.text = kwargs.pop('date_code')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_temperature(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request element carrying the
    output/interface/interface-identifier/cfp2/cfp2/temperature leaf and pass
    it to the callback.

    Required kwargs: ``interface_type``, ``interface_name``, ``temperature``.
    Optional kwarg: ``callback`` (defaults to ``self._callback``).
    """
    # Dead ET.Element("config") assignment from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")  # nested cfp2/cfp2 container per the schema
    temperature = ET.SubElement(cfp2, "temperature")
    temperature.text = kwargs.pop('temperature')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_voltage(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request element carrying the
    output/interface/interface-identifier/cfp2/cfp2/voltage leaf and pass
    it to the callback.

    Required kwargs: ``interface_type``, ``interface_name``, ``voltage``.
    Optional kwarg: ``callback`` (defaults to ``self._callback``).
    """
    # Dead ET.Element("config") assignment from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")  # nested cfp2/cfp2 container per the schema
    voltage = ET.SubElement(cfp2, "voltage")
    voltage.text = kwargs.pop('voltage')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_current(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request element carrying the
    output/interface/interface-identifier/cfp2/cfp2/current leaf and pass
    it to the callback.

    Required kwargs: ``interface_type``, ``interface_name``, ``current``.
    Optional kwarg: ``callback`` (defaults to ``self._callback``).
    """
    # Dead ET.Element("config") assignment from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")  # nested cfp2/cfp2 container per the schema
    current = ET.SubElement(cfp2, "current")
    current.text = kwargs.pop('current')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_tx_power(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request element carrying the
    output/interface/interface-identifier/cfp2/cfp2/tx-power leaf and pass
    it to the callback.

    Required kwargs: ``interface_type``, ``interface_name``, ``tx_power``.
    Optional kwarg: ``callback`` (defaults to ``self._callback``).
    """
    # Dead ET.Element("config") assignment from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")  # nested cfp2/cfp2 container per the schema
    tx_power = ET.SubElement(cfp2, "tx-power")
    tx_power.text = kwargs.pop('tx_power')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_rx_power(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request element carrying the
    output/interface/interface-identifier/cfp2/cfp2/rx-power leaf and pass
    it to the callback.

    Required kwargs: ``interface_type``, ``interface_name``, ``rx_power``.
    Optional kwarg: ``callback`` (defaults to ``self._callback``).
    """
    # Dead ET.Element("config") assignment from the generator removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")  # nested cfp2/cfp2 container per the schema
    rx_power = ET.SubElement(cfp2, "rx-power")
    rx_power.text = kwargs.pop('rx_power')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
| 48.640919
| 116
| 0.675903
| 40,745
| 363,980
| 5.750497
| 0.003289
| 0.130087
| 0.106298
| 0.112162
| 0.999411
| 0.999411
| 0.999411
| 0.999411
| 0.999411
| 0.999411
| 0
| 0.001104
| 0.21385
| 363,980
| 7,483
| 117
| 48.640919
| 0.817731
| 0.030884
| 0
| 0.999326
| 1
| 0
| 0.147598
| 0.003626
| 0
| 0
| 0
| 0
| 0
| 1
| 0.065239
| false
| 0
| 0.000169
| 0
| 0.130647
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f71cab3e710d8cc552a1054d037bb361fdbacb7d
| 2,386
|
py
|
Python
|
old/test_reverse_linked_list.py
|
kurtrm/data_structures_rev
|
58f425a877898a45595de9c57c7eb8e087a0c3a2
|
[
"MIT"
] | null | null | null |
old/test_reverse_linked_list.py
|
kurtrm/data_structures_rev
|
58f425a877898a45595de9c57c7eb8e087a0c3a2
|
[
"MIT"
] | null | null | null |
old/test_reverse_linked_list.py
|
kurtrm/data_structures_rev
|
58f425a877898a45595de9c57c7eb8e087a0c3a2
|
[
"MIT"
] | null | null | null |
"""Test of the reversed linked list."""
import pytest
@pytest.fixture
def linked_list():
    """Build the shared three-node LinkedList fixture (1, 2, 3)."""
    from linked_list import LinkedList
    return LinkedList([1, 2, 3])
def test_empty_linked_list(linked_list):
    """Reversing an emptied list must raise IndexError."""
    from reverse_linked_list import reverse_linked_list
    # Drain all three fixture nodes so the list is empty.
    for _ in range(3):
        linked_list.pop()
    with pytest.raises(IndexError):
        reverse_linked_list(linked_list)
def test_one_in_linked_list(linked_list):
    """A single-node list keeps its lone value at the head after reversal."""
    from reverse_linked_list import reverse_linked_list
    # Pop twice to leave only the first-pushed node.
    for _ in range(2):
        linked_list.pop()
    reverse_linked_list(linked_list)
    assert linked_list.head.data == 1
def test_two_in_linked_list(linked_list):
    """Reversal of a two-node list leaves 1 at the head."""
    from reverse_linked_list import reverse_linked_list
    linked_list.pop()  # drop the most recently pushed node
    reverse_linked_list(linked_list)
    assert linked_list.head.data == 1
def test_reverse_linked_list(linked_list):
    """Reversing the fixture list yields 1, 2, 3 when walked from the head."""
    from reverse_linked_list import reverse_linked_list
    reverse_linked_list(linked_list)
    node = linked_list.head
    for expected in (1, 2, 3):
        assert node.data == expected
        node = node.next_node
def test_long_reverse_linked_list(linked_list):
    """A five-node list reverses to 1..5 and back to 5..1 on a second pass."""
    from reverse_linked_list import reverse_linked_list
    linked_list.push(4)
    linked_list.push(5)
    # First reversal walks 1..5; reversing again restores 5..1.
    for expected_order in ((1, 2, 3, 4, 5), (5, 4, 3, 2, 1)):
        reverse_linked_list(linked_list)
        node = linked_list.head
        for value in expected_order:
            assert node.data == value
            node = node.next_node
        assert node is None  # list is properly terminated
| 34.57971
| 85
| 0.754401
| 373
| 2,386
| 4.490617
| 0.131367
| 0.38209
| 0.150448
| 0.200597
| 0.814925
| 0.784478
| 0.743284
| 0.743284
| 0.729552
| 0.729552
| 0
| 0.009945
| 0.157167
| 2,386
| 68
| 86
| 35.088235
| 0.822974
| 0.102682
| 0
| 0.583333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.354167
| 1
| 0.125
| false
| 0
| 0.145833
| 0
| 0.291667
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f71ea31341a506b476c8ad73d75b95e72760cafc
| 48
|
py
|
Python
|
aiomatrix/dispatcher/storage/presence/engines/__init__.py
|
Forden/aiomatrix
|
d258076bae8eb776495b92be46ee9f4baec8d9a6
|
[
"MIT"
] | 2
|
2021-10-29T18:07:08.000Z
|
2021-11-19T00:25:43.000Z
|
aiomatrix/dispatcher/storage/presence/engines/__init__.py
|
Forden/aiomatrix
|
d258076bae8eb776495b92be46ee9f4baec8d9a6
|
[
"MIT"
] | 1
|
2022-03-06T11:17:43.000Z
|
2022-03-06T11:17:43.000Z
|
aiomatrix/dispatcher/storage/presence/engines/__init__.py
|
Forden/aiomatrix
|
d258076bae8eb776495b92be46ee9f4baec8d9a6
|
[
"MIT"
] | null | null | null |
from .sqlite import SqlitePresenceStorageEngine
| 24
| 47
| 0.895833
| 4
| 48
| 10.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 48
| 1
| 48
| 48
| 0.977273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
f72d80fec048e93c8380bd0fed8c96da03ae69b2
| 2,886
|
py
|
Python
|
2017/01.py
|
GillesArcas/Advent_of_Code
|
1f57eb1686875df2684b0d56916b1d20724e9fb9
|
[
"MIT"
] | null | null | null |
2017/01.py
|
GillesArcas/Advent_of_Code
|
1f57eb1686875df2684b0d56916b1d20724e9fb9
|
[
"MIT"
] | null | null | null |
2017/01.py
|
GillesArcas/Advent_of_Code
|
1f57eb1686875df2684b0d56916b1d20724e9fb9
|
[
"MIT"
] | null | null | null |
# (input string, expected answer) pairs fed to test() for code1.
EXAMPLES1 = (
    ('1122', 3),
    ('1111', 4),
    ('1234', 0),
    ('91212129', 9)
)
# (input string, expected answer) pairs fed to test() for code2.
EXAMPLES2 = (
    ('1212', 6),
    ('1221', 0),
    ('123425', 4),
    ('123123', 12),
    ('12131415', 4)
)
INPUT = '31813174349235972159811869755166343882958376474278437681632495222499211488649543755655138842553867246131245462881756862736922925752647341673342756514856663979496747158241792857625471323535183222497949751644488277317173496124473893452425118133645984488759128897146498831373795721661696492622276282881218371273973538163779782435211491196616375135472517935481964439956844536136823757764494967297251545389464472794474447941564778733926532741752757865243946976266426548341889873514383464142659425122786667399143335772174973128383869893325977319651839516694295534146668728822393452626321892357192574444856264721585365164945647254645264693957898373214897848424966266582991272496771159583715456714645585576641458358326521858518319315233857473695712238323787254556597566461188452279853766184333696344395818615215846348586541164194624371353556812548945447432787795489443312941687221314432694115847863129826532628228386894683392352799514942665396273726821936346663485499159141368443782475714679953213388375939519711591262489869326145476958378464652451441434846382474578535468433514121336844727988128998543975147649823215332929623574231738442281161294838499441799996857746549441142859199799125595761724782225452394593514388571187279266291364278184761833324476838939898258225748562345853633364314923186685534864178665214135631494876474186833392929124337161222959459117554238429216916532175247326391321525832362274683763488347654497889261543959591212539851835354335598844669618391876623638137926893582131945361264841733341247646125278489995838369127582438419889922365596554237153412394494932582424222479798382932335239274297663365164912953364777876187522324991837775492621675953397843833247525599771974555545348388871578347332456586949283657613841414576976542343934911424716613479249893113961925713317644349946444271959375981158445151659431844142242547191181944395897963146947935463718145169266129118413523541222444997678726644615185324461293228124456118853885552279849917342474792984425629248492847827653
133583215539325866881662159421987315186914769478947389188382383546881622246793781846254253759714573354544997853153798862436887889318646643359555663135476261863'
def code1(string):
    """Sum each digit that equals the next digit, treating the string as circular."""
    length = len(string)
    total = 0
    for index, digit in enumerate(string):
        if digit == string[(index + 1) % length]:
            total += ord(digit) - ord('0')
    return total
def code2(string):
    """Sum each digit that equals the digit halfway around the circular string."""
    length = len(string)
    half = length // 2
    total = 0
    for index, digit in enumerate(string):
        if digit == string[(index + half) % length]:
            total += ord(digit) - ord('0')
    return total
def test(code, examples, myinput):
for data, result in examples:
assert code(data) == result, (data, result, code(data))
print('>', code(myinput))
# Run both parts against their example sets, then print the puzzle answers.
test(code1, EXAMPLES1, INPUT)
test(code2, EXAMPLES2, INPUT)
| 78
| 2,162
| 0.878378
| 97
| 2,886
| 26.134021
| 0.443299
| 0.010651
| 0.011834
| 0.014201
| 0.043393
| 0.043393
| 0.043393
| 0.043393
| 0.043393
| 0.043393
| 0
| 0.827124
| 0.069993
| 2,886
| 36
| 2,163
| 80.166667
| 0.117362
| 0
| 0
| 0
| 0
| 0
| 0.772552
| 0.754651
| 0
| 1
| 0
| 0
| 0.041667
| 1
| 0.125
| false
| 0
| 0
| 0.083333
| 0.208333
| 0.041667
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f788ef121ae3cfe52d67c24656d37348399e8dc2
| 27,151
|
py
|
Python
|
sdk/python/pulumi_digitalocean/spaces_bucket.py
|
pulumi/pulumi-digitalocean
|
b924205ec8f66f5240a755c91aa8642162038dfb
|
[
"ECL-2.0",
"Apache-2.0"
] | 53
|
2019-04-25T14:43:12.000Z
|
2022-03-14T15:51:44.000Z
|
sdk/python/pulumi_digitalocean/spaces_bucket.py
|
pulumi/pulumi-digitalocean
|
b924205ec8f66f5240a755c91aa8642162038dfb
|
[
"ECL-2.0",
"Apache-2.0"
] | 158
|
2019-04-15T21:47:18.000Z
|
2022-03-29T21:21:57.000Z
|
sdk/python/pulumi_digitalocean/spaces_bucket.py
|
pulumi/pulumi-digitalocean
|
b924205ec8f66f5240a755c91aa8642162038dfb
|
[
"ECL-2.0",
"Apache-2.0"
] | 10
|
2019-04-15T20:16:11.000Z
|
2021-05-28T19:08:32.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['SpacesBucketArgs', 'SpacesBucket']
@pulumi.input_type
class SpacesBucketArgs:
    # tfgen-generated input-argument class; @pulumi.input_type introspects the
    # property/getter structure below, so code must stay exactly as generated.
    def __init__(__self__, *,
                 acl: Optional[pulumi.Input[str]] = None,
                 cors_rules: Optional[pulumi.Input[Sequence[pulumi.Input['SpacesBucketCorsRuleArgs']]]] = None,
                 force_destroy: Optional[pulumi.Input[bool]] = None,
                 lifecycle_rules: Optional[pulumi.Input[Sequence[pulumi.Input['SpacesBucketLifecycleRuleArgs']]]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 region: Optional[pulumi.Input[Union[str, 'Region']]] = None,
                 versioning: Optional[pulumi.Input['SpacesBucketVersioningArgs']] = None):
        """
        The set of arguments for constructing a SpacesBucket resource.
        :param pulumi.Input[str] acl: Canned ACL applied on bucket creation (`private` or `public-read`)
        :param pulumi.Input[Sequence[pulumi.Input['SpacesBucketCorsRuleArgs']]] cors_rules: A rule of Cross-Origin Resource Sharing (documented below).
        :param pulumi.Input[bool] force_destroy: Unless `true`, the bucket will only be destroyed if empty (Defaults to `false`)
        :param pulumi.Input[Sequence[pulumi.Input['SpacesBucketLifecycleRuleArgs']]] lifecycle_rules: A configuration of object lifecycle management (documented below).
        :param pulumi.Input[str] name: The name of the bucket
        :param pulumi.Input[Union[str, 'Region']] region: The region where the bucket resides (Defaults to `nyc3`)
        :param pulumi.Input['SpacesBucketVersioningArgs'] versioning: A state of versioning (documented below)
        """
        # Only explicitly supplied arguments are recorded; None values are
        # skipped so unset inputs stay absent from the resource arguments.
        if acl is not None:
            pulumi.set(__self__, "acl", acl)
        if cors_rules is not None:
            pulumi.set(__self__, "cors_rules", cors_rules)
        if force_destroy is not None:
            pulumi.set(__self__, "force_destroy", force_destroy)
        if lifecycle_rules is not None:
            pulumi.set(__self__, "lifecycle_rules", lifecycle_rules)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if region is not None:
            pulumi.set(__self__, "region", region)
        if versioning is not None:
            pulumi.set(__self__, "versioning", versioning)

    @property
    @pulumi.getter
    def acl(self) -> Optional[pulumi.Input[str]]:
        """
        Canned ACL applied on bucket creation (`private` or `public-read`)
        """
        return pulumi.get(self, "acl")

    @acl.setter
    def acl(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "acl", value)

    @property
    @pulumi.getter(name="corsRules")
    def cors_rules(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['SpacesBucketCorsRuleArgs']]]]:
        """
        A rule of Cross-Origin Resource Sharing (documented below).
        """
        return pulumi.get(self, "cors_rules")

    @cors_rules.setter
    def cors_rules(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['SpacesBucketCorsRuleArgs']]]]):
        pulumi.set(self, "cors_rules", value)

    @property
    @pulumi.getter(name="forceDestroy")
    def force_destroy(self) -> Optional[pulumi.Input[bool]]:
        """
        Unless `true`, the bucket will only be destroyed if empty (Defaults to `false`)
        """
        return pulumi.get(self, "force_destroy")

    @force_destroy.setter
    def force_destroy(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "force_destroy", value)

    @property
    @pulumi.getter(name="lifecycleRules")
    def lifecycle_rules(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['SpacesBucketLifecycleRuleArgs']]]]:
        """
        A configuration of object lifecycle management (documented below).
        """
        return pulumi.get(self, "lifecycle_rules")

    @lifecycle_rules.setter
    def lifecycle_rules(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['SpacesBucketLifecycleRuleArgs']]]]):
        pulumi.set(self, "lifecycle_rules", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the bucket
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def region(self) -> Optional[pulumi.Input[Union[str, 'Region']]]:
        """
        The region where the bucket resides (Defaults to `nyc3`)
        """
        return pulumi.get(self, "region")

    @region.setter
    def region(self, value: Optional[pulumi.Input[Union[str, 'Region']]]):
        pulumi.set(self, "region", value)

    @property
    @pulumi.getter
    def versioning(self) -> Optional[pulumi.Input['SpacesBucketVersioningArgs']]:
        """
        A state of versioning (documented below)
        """
        return pulumi.get(self, "versioning")

    @versioning.setter
    def versioning(self, value: Optional[pulumi.Input['SpacesBucketVersioningArgs']]):
        pulumi.set(self, "versioning", value)
@pulumi.input_type
class _SpacesBucketState:
    """
    Internal pulumi input type holding the full (input and output) state of a
    SpacesBucket; consumed by `SpacesBucket.get` when looking up an existing
    resource.
    """

    def __init__(__self__, *,
                 acl: Optional[pulumi.Input[str]] = None,
                 bucket_domain_name: Optional[pulumi.Input[str]] = None,
                 bucket_urn: Optional[pulumi.Input[str]] = None,
                 cors_rules: Optional[pulumi.Input[Sequence[pulumi.Input['SpacesBucketCorsRuleArgs']]]] = None,
                 force_destroy: Optional[pulumi.Input[bool]] = None,
                 lifecycle_rules: Optional[pulumi.Input[Sequence[pulumi.Input['SpacesBucketLifecycleRuleArgs']]]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 region: Optional[pulumi.Input[Union[str, 'Region']]] = None,
                 versioning: Optional[pulumi.Input['SpacesBucketVersioningArgs']] = None):
        """
        Input properties used for looking up and filtering SpacesBucket resources.

        :param pulumi.Input[str] acl: Canned ACL applied on bucket creation (`private` or `public-read`)
        :param pulumi.Input[str] bucket_domain_name: The FQDN of the bucket (e.g. bucket-name.nyc3.digitaloceanspaces.com)
        :param pulumi.Input[str] bucket_urn: The uniform resource name for the bucket
        :param pulumi.Input[Sequence[pulumi.Input['SpacesBucketCorsRuleArgs']]] cors_rules: A rule of Cross-Origin Resource Sharing (documented below).
        :param pulumi.Input[bool] force_destroy: Unless `true`, the bucket will only be destroyed if empty (Defaults to `false`)
        :param pulumi.Input[Sequence[pulumi.Input['SpacesBucketLifecycleRuleArgs']]] lifecycle_rules: A configuration of object lifecycle management (documented below).
        :param pulumi.Input[str] name: The name of the bucket
        :param pulumi.Input[Union[str, 'Region']] region: The region where the bucket resides (Defaults to `nyc3`)
        :param pulumi.Input['SpacesBucketVersioningArgs'] versioning: A state of versioning (documented below)
        """
        # Record only the values the caller actually supplied; unset fields are
        # left absent so they do not constrain the state lookup/filter.
        if acl is not None:
            pulumi.set(__self__, "acl", acl)
        if bucket_domain_name is not None:
            pulumi.set(__self__, "bucket_domain_name", bucket_domain_name)
        if bucket_urn is not None:
            pulumi.set(__self__, "bucket_urn", bucket_urn)
        if cors_rules is not None:
            pulumi.set(__self__, "cors_rules", cors_rules)
        if force_destroy is not None:
            pulumi.set(__self__, "force_destroy", force_destroy)
        if lifecycle_rules is not None:
            pulumi.set(__self__, "lifecycle_rules", lifecycle_rules)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if region is not None:
            pulumi.set(__self__, "region", region)
        if versioning is not None:
            pulumi.set(__self__, "versioning", versioning)

    @property
    @pulumi.getter
    def acl(self) -> Optional[pulumi.Input[str]]:
        """
        Canned ACL applied on bucket creation (`private` or `public-read`)
        """
        return pulumi.get(self, "acl")

    @acl.setter
    def acl(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "acl", value)

    @property
    @pulumi.getter(name="bucketDomainName")
    def bucket_domain_name(self) -> Optional[pulumi.Input[str]]:
        """
        The FQDN of the bucket (e.g. bucket-name.nyc3.digitaloceanspaces.com)
        """
        return pulumi.get(self, "bucket_domain_name")

    @bucket_domain_name.setter
    def bucket_domain_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "bucket_domain_name", value)

    @property
    @pulumi.getter(name="bucketUrn")
    def bucket_urn(self) -> Optional[pulumi.Input[str]]:
        """
        The uniform resource name for the bucket
        """
        return pulumi.get(self, "bucket_urn")

    @bucket_urn.setter
    def bucket_urn(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "bucket_urn", value)

    @property
    @pulumi.getter(name="corsRules")
    def cors_rules(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['SpacesBucketCorsRuleArgs']]]]:
        """
        A rule of Cross-Origin Resource Sharing (documented below).
        """
        return pulumi.get(self, "cors_rules")

    @cors_rules.setter
    def cors_rules(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['SpacesBucketCorsRuleArgs']]]]):
        pulumi.set(self, "cors_rules", value)

    @property
    @pulumi.getter(name="forceDestroy")
    def force_destroy(self) -> Optional[pulumi.Input[bool]]:
        """
        Unless `true`, the bucket will only be destroyed if empty (Defaults to `false`)
        """
        return pulumi.get(self, "force_destroy")

    @force_destroy.setter
    def force_destroy(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "force_destroy", value)

    @property
    @pulumi.getter(name="lifecycleRules")
    def lifecycle_rules(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['SpacesBucketLifecycleRuleArgs']]]]:
        """
        A configuration of object lifecycle management (documented below).
        """
        return pulumi.get(self, "lifecycle_rules")

    @lifecycle_rules.setter
    def lifecycle_rules(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['SpacesBucketLifecycleRuleArgs']]]]):
        pulumi.set(self, "lifecycle_rules", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the bucket
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def region(self) -> Optional[pulumi.Input[Union[str, 'Region']]]:
        """
        The region where the bucket resides (Defaults to `nyc3`)
        """
        return pulumi.get(self, "region")

    @region.setter
    def region(self, value: Optional[pulumi.Input[Union[str, 'Region']]]):
        pulumi.set(self, "region", value)

    @property
    @pulumi.getter
    def versioning(self) -> Optional[pulumi.Input['SpacesBucketVersioningArgs']]:
        """
        A state of versioning (documented below)
        """
        return pulumi.get(self, "versioning")

    @versioning.setter
    def versioning(self, value: Optional[pulumi.Input['SpacesBucketVersioningArgs']]):
        pulumi.set(self, "versioning", value)
class SpacesBucket(pulumi.CustomResource):
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 acl: Optional[pulumi.Input[str]] = None,
                 cors_rules: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SpacesBucketCorsRuleArgs']]]]] = None,
                 force_destroy: Optional[pulumi.Input[bool]] = None,
                 lifecycle_rules: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SpacesBucketLifecycleRuleArgs']]]]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 region: Optional[pulumi.Input[Union[str, 'Region']]] = None,
                 versioning: Optional[pulumi.Input[pulumi.InputType['SpacesBucketVersioningArgs']]] = None,
                 __props__=None):
        """
        Provides a bucket resource for Spaces, DigitalOcean's object storage product.

        The [Spaces API](https://docs.digitalocean.com/reference/api/spaces-api/) was
        designed to be interoperable with Amazon's AWS S3 API, so the usual S3
        tooling works against it. Authentication requires either the
        `SPACES_ACCESS_KEY_ID` and `SPACES_SECRET_ACCESS_KEY` environment variables
        or the provider's `spaces_access_id` and `spaces_secret_key` arguments,
        set to a key pair generated in the DigitalOcean control panel. For example:

        ```python
        import pulumi
        import pulumi_digitalocean as digitalocean
        static_assets = digitalocean.SpacesBucket("static-assets")
        # ...
        ```

        For more information, see [An Introduction to DigitalOcean Spaces](https://www.digitalocean.com/community/tutorials/an-introduction-to-digitalocean-spaces)

        ## Example Usage
        ### Create a New Bucket
        ```python
        import pulumi
        import pulumi_digitalocean as digitalocean
        foobar = digitalocean.SpacesBucket("foobar", region="nyc3")
        ```
        ### Create a New Bucket With CORS Rules
        ```python
        import pulumi
        import pulumi_digitalocean as digitalocean
        foobar = digitalocean.SpacesBucket("foobar",
            cors_rules=[
                digitalocean.SpacesBucketCorsRuleArgs(
                    allowed_headers=["*"],
                    allowed_methods=["GET"],
                    allowed_origins=["*"],
                    max_age_seconds=3000,
                ),
                digitalocean.SpacesBucketCorsRuleArgs(
                    allowed_headers=["*"],
                    allowed_methods=[
                        "PUT",
                        "POST",
                        "DELETE",
                    ],
                    allowed_origins=["https://www.example.com"],
                    max_age_seconds=3000,
                ),
            ],
            region="nyc3")
        ```

        ## Import

        Buckets can be imported using the `region` and `name` attributes (delimited by a comma)

        ```sh
        $ pulumi import digitalocean:index/spacesBucket:SpacesBucket foobar `region`,`name`
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] acl: Canned ACL applied on bucket creation (`private` or `public-read`)
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SpacesBucketCorsRuleArgs']]]] cors_rules: A rule of Cross-Origin Resource Sharing (documented below).
        :param pulumi.Input[bool] force_destroy: Unless `true`, the bucket will only be destroyed if empty (Defaults to `false`)
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SpacesBucketLifecycleRuleArgs']]]] lifecycle_rules: A configuration of object lifecycle management (documented below).
        :param pulumi.Input[str] name: The name of the bucket
        :param pulumi.Input[Union[str, 'Region']] region: The region where the bucket resides (Defaults to `nyc3`)
        :param pulumi.Input[pulumi.InputType['SpacesBucketVersioningArgs']] versioning: A state of versioning (documented below)
        """
        ...

    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: Optional[SpacesBucketArgs] = None,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Provides a bucket resource for Spaces, DigitalOcean's object storage product.

        This overload takes a single `SpacesBucketArgs` object instead of
        individual keyword arguments; see the other overload for full
        documentation, usage examples, and import instructions.

        :param str resource_name: The name of the resource.
        :param SpacesBucketArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...

    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Runtime dispatcher for the two typed overloads above: detect whether
        # the caller passed a SpacesBucketArgs object or plain keyword args.
        resource_args, opts = _utilities.get_resource_args_opts(SpacesBucketArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            # Args-object form: expand its fields into keyword arguments.
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       acl: Optional[pulumi.Input[str]] = None,
                       cors_rules: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SpacesBucketCorsRuleArgs']]]]] = None,
                       force_destroy: Optional[pulumi.Input[bool]] = None,
                       lifecycle_rules: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SpacesBucketLifecycleRuleArgs']]]]] = None,
                       name: Optional[pulumi.Input[str]] = None,
                       region: Optional[pulumi.Input[Union[str, 'Region']]] = None,
                       versioning: Optional[pulumi.Input[pulumi.InputType['SpacesBucketVersioningArgs']]] = None,
                       __props__=None):
        # Shared implementation behind both __init__ overloads.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # No opts.id means we are creating a new resource (as opposed to
            # adopting an existing one), so build the property bag from scratch.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = SpacesBucketArgs.__new__(SpacesBucketArgs)
            __props__.__dict__["acl"] = acl
            __props__.__dict__["cors_rules"] = cors_rules
            __props__.__dict__["force_destroy"] = force_destroy
            __props__.__dict__["lifecycle_rules"] = lifecycle_rules
            __props__.__dict__["name"] = name
            __props__.__dict__["region"] = region
            __props__.__dict__["versioning"] = versioning
            # Output-only properties: populated by the provider after creation.
            __props__.__dict__["bucket_domain_name"] = None
            __props__.__dict__["bucket_urn"] = None
        super(SpacesBucket, __self__).__init__(
            'digitalocean:index/spacesBucket:SpacesBucket',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            acl: Optional[pulumi.Input[str]] = None,
            bucket_domain_name: Optional[pulumi.Input[str]] = None,
            bucket_urn: Optional[pulumi.Input[str]] = None,
            cors_rules: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SpacesBucketCorsRuleArgs']]]]] = None,
            force_destroy: Optional[pulumi.Input[bool]] = None,
            lifecycle_rules: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SpacesBucketLifecycleRuleArgs']]]]] = None,
            name: Optional[pulumi.Input[str]] = None,
            region: Optional[pulumi.Input[Union[str, 'Region']]] = None,
            versioning: Optional[pulumi.Input[pulumi.InputType['SpacesBucketVersioningArgs']]] = None) -> 'SpacesBucket':
        """
        Get an existing SpacesBucket resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] acl: Canned ACL applied on bucket creation (`private` or `public-read`)
        :param pulumi.Input[str] bucket_domain_name: The FQDN of the bucket (e.g. bucket-name.nyc3.digitaloceanspaces.com)
        :param pulumi.Input[str] bucket_urn: The uniform resource name for the bucket
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SpacesBucketCorsRuleArgs']]]] cors_rules: A rule of Cross-Origin Resource Sharing (documented below).
        :param pulumi.Input[bool] force_destroy: Unless `true`, the bucket will only be destroyed if empty (Defaults to `false`)
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SpacesBucketLifecycleRuleArgs']]]] lifecycle_rules: A configuration of object lifecycle management (documented below).
        :param pulumi.Input[str] name: The name of the bucket
        :param pulumi.Input[Union[str, 'Region']] region: The region where the bucket resides (Defaults to `nyc3`)
        :param pulumi.Input[pulumi.InputType['SpacesBucketVersioningArgs']] versioning: A state of versioning (documented below)
        """
        # Attaching the provider id to opts tells the engine to read existing
        # state rather than create a new resource.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = _SpacesBucketState.__new__(_SpacesBucketState)
        __props__.__dict__["acl"] = acl
        __props__.__dict__["bucket_domain_name"] = bucket_domain_name
        __props__.__dict__["bucket_urn"] = bucket_urn
        __props__.__dict__["cors_rules"] = cors_rules
        __props__.__dict__["force_destroy"] = force_destroy
        __props__.__dict__["lifecycle_rules"] = lifecycle_rules
        __props__.__dict__["name"] = name
        __props__.__dict__["region"] = region
        __props__.__dict__["versioning"] = versioning
        return SpacesBucket(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter
    def acl(self) -> pulumi.Output[Optional[str]]:
        """
        Canned ACL applied on bucket creation (`private` or `public-read`)
        """
        return pulumi.get(self, "acl")

    @property
    @pulumi.getter(name="bucketDomainName")
    def bucket_domain_name(self) -> pulumi.Output[str]:
        """
        The FQDN of the bucket (e.g. bucket-name.nyc3.digitaloceanspaces.com)
        """
        return pulumi.get(self, "bucket_domain_name")

    @property
    @pulumi.getter(name="bucketUrn")
    def bucket_urn(self) -> pulumi.Output[str]:
        """
        The uniform resource name for the bucket
        """
        return pulumi.get(self, "bucket_urn")

    @property
    @pulumi.getter(name="corsRules")
    def cors_rules(self) -> pulumi.Output[Optional[Sequence['outputs.SpacesBucketCorsRule']]]:
        """
        A rule of Cross-Origin Resource Sharing (documented below).
        """
        return pulumi.get(self, "cors_rules")

    @property
    @pulumi.getter(name="forceDestroy")
    def force_destroy(self) -> pulumi.Output[Optional[bool]]:
        """
        Unless `true`, the bucket will only be destroyed if empty (Defaults to `false`)
        """
        return pulumi.get(self, "force_destroy")

    @property
    @pulumi.getter(name="lifecycleRules")
    def lifecycle_rules(self) -> pulumi.Output[Optional[Sequence['outputs.SpacesBucketLifecycleRule']]]:
        """
        A configuration of object lifecycle management (documented below).
        """
        return pulumi.get(self, "lifecycle_rules")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        The name of the bucket
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def region(self) -> pulumi.Output[Optional[str]]:
        """
        The region where the bucket resides (Defaults to `nyc3`)
        """
        return pulumi.get(self, "region")

    @property
    @pulumi.getter
    def versioning(self) -> pulumi.Output[Optional['outputs.SpacesBucketVersioning']]:
        """
        A state of versioning (documented below)
        """
        return pulumi.get(self, "versioning")
| 43.581059
| 186
| 0.64086
| 2,954
| 27,151
| 5.704469
| 0.086324
| 0.08682
| 0.080055
| 0.033945
| 0.890867
| 0.87526
| 0.860127
| 0.844104
| 0.841078
| 0.824283
| 0
| 0.001769
| 0.250451
| 27,151
| 622
| 187
| 43.651125
| 0.826249
| 0.376855
| 0
| 0.771044
| 1
| 0
| 0.130979
| 0.056276
| 0
| 0
| 0
| 0
| 0
| 1
| 0.161616
| false
| 0.003367
| 0.026936
| 0
| 0.286195
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f7943fa9622a94768af29afa34b76def5cad626e
| 48
|
py
|
Python
|
masonite/contrib/azure/drivers/__init__.py
|
josephmancuso/masonite-azure-driver
|
06e766f9833b48d28a6ffd4afb24114be1a60a1c
|
[
"MIT"
] | null | null | null |
masonite/contrib/azure/drivers/__init__.py
|
josephmancuso/masonite-azure-driver
|
06e766f9833b48d28a6ffd4afb24114be1a60a1c
|
[
"MIT"
] | null | null | null |
masonite/contrib/azure/drivers/__init__.py
|
josephmancuso/masonite-azure-driver
|
06e766f9833b48d28a6ffd4afb24114be1a60a1c
|
[
"MIT"
] | 1
|
2019-08-07T16:53:09.000Z
|
2019-08-07T16:53:09.000Z
|
from .UploadAzureDriver import UploadAzureDriver
| 48
| 48
| 0.916667
| 4
| 48
| 11
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.0625
| 48
| 1
| 48
| 48
| 0.977778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
f7959c2912df756edf4a2138890b657e7ef3c597
| 279
|
py
|
Python
|
nanoleafapi/__init__.py
|
iandouglas/nanoleafapi
|
cc5f1c16f04fbafd5460569d2e5b165d1de7bc41
|
[
"MIT"
] | 29
|
2020-01-02T18:37:18.000Z
|
2022-03-28T19:03:16.000Z
|
nanoleafapi/__init__.py
|
iandouglas/nanoleafapi
|
cc5f1c16f04fbafd5460569d2e5b165d1de7bc41
|
[
"MIT"
] | 9
|
2020-01-04T18:59:19.000Z
|
2022-01-06T06:38:16.000Z
|
nanoleafapi/__init__.py
|
iandouglas/nanoleafapi
|
cc5f1c16f04fbafd5460569d2e5b165d1de7bc41
|
[
"MIT"
] | 8
|
2020-01-04T19:02:48.000Z
|
2022-03-20T18:49:19.000Z
|
from nanoleafapi.nanoleaf import Nanoleaf, NanoleafRegistrationError, NanoleafConnectionError, NanoleafEffectCreationError
from nanoleafapi.digital_twin import NanoleafDigitalTwin
from nanoleafapi.nanoleaf import RED, ORANGE, YELLOW, GREEN, LIGHT_BLUE, BLUE, PINK, PURPLE, WHITE
| 69.75
| 122
| 0.867384
| 28
| 279
| 8.571429
| 0.678571
| 0.1875
| 0.191667
| 0.241667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.082437
| 279
| 3
| 123
| 93
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
f797ce34a4068b696193c8fd1ec0a376ae1676fb
| 7,443
|
py
|
Python
|
dae/dae/pedigrees/tests/test_family_role_builder.py
|
iossifovlab/gpf
|
e556243d29666179dbcb72859845b4d6c011af2b
|
[
"MIT"
] | null | null | null |
dae/dae/pedigrees/tests/test_family_role_builder.py
|
iossifovlab/gpf
|
e556243d29666179dbcb72859845b4d6c011af2b
|
[
"MIT"
] | 82
|
2019-07-22T11:44:23.000Z
|
2022-01-13T15:27:33.000Z
|
dae/dae/pedigrees/tests/test_family_role_builder.py
|
iossifovlab/gpf
|
e556243d29666179dbcb72859845b4d6c011af2b
|
[
"MIT"
] | null | null | null |
import pytest
from dae.pedigrees.family_role_builder import FamilyRoleBuilder
from dae.pedigrees.loader import FamiliesLoader
from dae.variants.attributes import Role
# TODO: Organize into 1 test
@pytest.mark.parametrize(
    "ped_file",
    ["pedigrees/pedigree_no_role_A.ped", "pedigrees/pedigree_no_role_B.ped"],
)
def test_mom_dad_child_sibling_roles(fixture_dirname, ped_file):
    """Nuclear-family roles (dad, mom, proband, sibling) are inferred."""
    families = FamiliesLoader.load_pedigree_file(fixture_dirname(ped_file))
    family = families.get("f1")
    FamilyRoleBuilder(family).build_roles()

    members = family.full_members
    expected_roles = [Role.dad, Role.mom, Role.prb, Role.sib]
    for index, expected in enumerate(expected_roles):
        assert members[index].role == expected
def test_paternal_and_maternal_grandparents(fixture_dirname):
    """Grandparent roles on both sides are inferred."""
    ped_file = fixture_dirname("pedigrees/pedigree_no_role_C.ped")
    families = FamiliesLoader.load_pedigree_file(ped_file)
    family = families.get("f1")
    FamilyRoleBuilder(family).build_roles()

    members = family.full_members
    expected_roles = [
        Role.maternal_grandfather, Role.maternal_grandmother,
        Role.paternal_grandfather, Role.paternal_grandmother,
        Role.dad, Role.mom, Role.prb, Role.sib,
    ]
    for index, expected in enumerate(expected_roles):
        assert members[index].role == expected
def test_child_and_spouse(fixture_dirname):
    """Spouse and child of the proband are inferred."""
    ped_file = fixture_dirname("pedigrees/pedigree_no_role_D.ped")
    families = FamiliesLoader.load_pedigree_file(ped_file)
    family = families.get("f1")
    FamilyRoleBuilder(family).build_roles()

    members = family.full_members
    expected_roles = [
        Role.dad, Role.mom, Role.prb, Role.sib, Role.spouse, Role.child,
    ]
    for index, expected in enumerate(expected_roles):
        assert members[index].role == expected
def test_maternal_and_paternal_aunts_and_uncles(fixture_dirname):
    """Aunt/uncle roles on both sides are inferred."""
    ped_file = fixture_dirname("pedigrees/pedigree_no_role_E.ped")
    families = FamiliesLoader.load_pedigree_file(ped_file)
    family = families.get("f1")
    FamilyRoleBuilder(family).build_roles()

    members = family.full_members
    expected_roles = [
        Role.maternal_grandfather, Role.maternal_grandmother,
        Role.paternal_grandfather, Role.paternal_grandmother,
        Role.dad, Role.mom,
        Role.maternal_aunt, Role.maternal_uncle,
        Role.paternal_aunt, Role.paternal_uncle,
        Role.prb, Role.sib,
    ]
    for index, expected in enumerate(expected_roles):
        assert members[index].role == expected
def test_maternal_and_paternal_cousins(fixture_dirname):
    """Cousin roles are inferred; aunts'/uncles' spouses stay unknown."""
    ped_file = fixture_dirname("pedigrees/pedigree_no_role_F.ped")
    families = FamiliesLoader.load_pedigree_file(ped_file)
    family = families.get("f1")
    FamilyRoleBuilder(family).build_roles()

    members = family.full_members
    expected_roles = [
        Role.maternal_grandfather, Role.maternal_grandmother,
        Role.paternal_grandfather, Role.paternal_grandmother,
        Role.dad, Role.mom,
        Role.maternal_aunt, Role.unknown,
        Role.unknown, Role.paternal_uncle,
        Role.prb, Role.sib,
        Role.maternal_cousin, Role.paternal_cousin,
    ]
    for index, expected in enumerate(expected_roles):
        assert members[index].role == expected
def test_stepmom_and_stepdad(fixture_dirname):
    """Step-parent and half-sibling roles are inferred."""
    ped_file = fixture_dirname("pedigrees/pedigree_no_role_G.ped")
    families = FamiliesLoader.load_pedigree_file(ped_file)
    family = families.get("f1")
    FamilyRoleBuilder(family).build_roles()

    members = family.full_members
    expected_roles = [
        Role.dad, Role.mom,
        Role.step_dad, Role.step_mom,
        Role.maternal_half_sibling, Role.paternal_half_sibling,
        Role.prb, Role.sib,
    ]
    for index, expected in enumerate(expected_roles):
        assert members[index].role == expected
def test_handling_of_family_with_only_prb_role(fixture_dirname):
    """Roles are completed when only the proband role is given."""
    ped_file = fixture_dirname("pedigrees/pedigree_prb_only.ped")
    families = FamiliesLoader.load_pedigree_file(ped_file)
    family = families.get("f1")
    FamilyRoleBuilder(family).build_roles()

    members = family.full_members
    expected_roles = [Role.dad, Role.mom, Role.prb, Role.sib]
    for index, expected in enumerate(expected_roles):
        assert members[index].role == expected
def test_handling_of_large_family_with_only_prb_role(fixture_dirname):
    """Roles are completed for a large family when only proband is given."""
    ped_file = fixture_dirname("pedigrees/pedigree_prb_only_large.ped")
    families = FamiliesLoader.load_pedigree_file(ped_file)
    family = families.get("f1")
    FamilyRoleBuilder(family).build_roles()

    members = family.full_members
    expected_roles = [
        Role.maternal_grandfather, Role.maternal_grandmother,
        Role.paternal_grandfather, Role.paternal_grandmother,
        Role.dad, Role.mom,
        Role.maternal_aunt, Role.unknown,
        Role.unknown, Role.paternal_uncle,
        Role.prb, Role.sib,
        Role.maternal_cousin, Role.paternal_cousin,
    ]
    for index, expected in enumerate(expected_roles):
        assert members[index].role == expected
def test_proband_column(fixture_dirname):
    """The `proband` attribute appears only for pedigrees that carry it."""
    no_role_params = {"ped_no_role": True}

    # Pedigree without a proband column: no person carries the attribute.
    ped_file = fixture_dirname("pedigrees/pedigree_no_role_F.ped")
    families = FamiliesLoader(ped_file, **no_role_params).load()
    for person in families.persons.values():
        assert not person.has_attr("proband")

    # Pedigree with a proband column: every person carries the attribute
    # and roles are still inferred correctly.
    ped_file = fixture_dirname("pedigrees/pedigree_no_role_H.ped")
    families = FamiliesLoader(ped_file, **no_role_params).load()
    for person in families.persons.values():
        assert person.has_attr("proband")

    family = families.get("f1")
    assert family is not None

    members = family.full_members
    expected_roles = [
        Role.maternal_grandfather, Role.maternal_grandmother,
        Role.paternal_grandfather, Role.paternal_grandmother,
        Role.dad, Role.mom,
        Role.maternal_aunt, Role.unknown,
        Role.unknown, Role.paternal_uncle,
        Role.prb, Role.sib,
        Role.maternal_cousin, Role.paternal_cousin,
    ]
    for index, expected in enumerate(expected_roles):
        assert members[index].role == expected
| 37.781726
| 77
| 0.730754
| 1,003
| 7,443
| 5.210369
| 0.098704
| 0.208955
| 0.058171
| 0.040184
| 0.886146
| 0.843666
| 0.843666
| 0.839839
| 0.83142
| 0.83142
| 0
| 0.017236
| 0.158135
| 7,443
| 196
| 78
| 37.97449
| 0.816789
| 0.003493
| 0
| 0.776398
| 0
| 0
| 0.056372
| 0.048011
| 0
| 0
| 0
| 0.005102
| 0.540373
| 1
| 0.055901
| false
| 0
| 0.024845
| 0
| 0.080745
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
e3a2736be9f34be25010fe308d66fa886eba238c
| 24,229
|
py
|
Python
|
pred.py
|
BoyuanChen/neural-state-variables
|
10483d93ac8c006f3786c434fb57d70d9ab465ec
|
[
"MIT"
] | 17
|
2021-12-29T16:48:46.000Z
|
2022-03-25T01:57:13.000Z
|
pred.py
|
BoyuanChen/neural-state-variables
|
10483d93ac8c006f3786c434fb57d70d9ab465ec
|
[
"MIT"
] | null | null | null |
pred.py
|
BoyuanChen/neural-state-variables
|
10483d93ac8c006f3786c434fb57d70d9ab465ec
|
[
"MIT"
] | 1
|
2022-01-22T11:26:09.000Z
|
2022-01-22T11:26:09.000Z
|
"""
A. Long-term future prediction (model rollout)
1. encoder-decoder (0, 1 -> 8192-dim latent -> 2', 3'):
- feed (2', 3') images as input to predict (4', 5') images ...
2. encoder-decoder-64 (0, 1 -> 64-dim latent -> 2', 3'):
- feed (2', 3') images as input to predict (4', 5') images ...
3. encoder-decoder-64 & refine-64 (0, 1 -> id-dim latent -> 2', 3')
- feed (2', 3') images as input to predict (4', 5') images ...
4. encoder-decoder-64 & refine-64 hybrid:
- use refine-64 model at certain prediction steps
B. Long-term future prediction with perturbation (model rollout)
"""
import os
import sys
import glob
import yaml
import json
import torch
import pprint
import shutil
import numpy as np
from PIL import Image
from tqdm import tqdm
from munch import munchify
from torchvision import transforms
from collections import OrderedDict
from models import VisDynamicsModel
from models_latentpred import VisLatentDynamicsModel
from dataset import NeuralPhysDataset
from pytorch_lightning import Trainer, seed_everything
from pytorch_lightning.loggers import TensorBoardLogger
def mkdir(folder):
    """(Re)create *folder* as an empty directory, removing any existing tree."""
    try:
        shutil.rmtree(folder)
    except FileNotFoundError:
        pass  # nothing to clean up
    os.makedirs(folder)
def load_config(filepath):
    """Parse the YAML file at *filepath*.

    Returns the parsed trainer parameters, or None (after printing the
    parser error) when the file is not valid YAML.
    """
    with open(filepath, 'r') as stream:
        try:
            return yaml.safe_load(stream)
        except yaml.YAMLError as exc:
            print(exc)
def seed(cfg):
    """Seed torch's CPU RNG (and the CUDA RNG when cfg.if_cuda is set)."""
    torch.manual_seed(cfg.seed)
    if cfg.if_cuda:
        torch.cuda.manual_seed(cfg.seed)
    # For strict reproducibility additionally enable:
    # torch.set_deterministic(True)
def model_rollout():
    """Run long-term future prediction (scheme A) on every test video.

    CLI contract (inferred from the argv indices used here — confirm
    against the launch scripts):
      sys.argv[2]: yaml config filepath
      sys.argv[3]: checkpoint dir for 'encoder-decoder(-64)' models, or the
                   high-dim model checkpoint dir for 'refine' models
      sys.argv[4]: refine-model checkpoint dir ('refine' models only)
      sys.argv[6]: pred_len — frames between re-seeding from ground truth

    Frames are processed in pairs concatenated along the width axis
    (torch.cat(..., 2) then a slice at column 128). Every `pred_len`
    frames the input is reset to ground-truth frames; otherwise the
    model's previous output is fed back in (rollout). Predicted frames
    and a per-video loss list are written under
    <log_dir>/prediction_long_term/model_rollout/.
    """
    config_filepath = str(sys.argv[2])
    cfg = load_config(filepath=config_filepath)
    pprint.pprint(cfg)
    cfg = munchify(cfg)  # attribute-style access to the config dict
    seed(cfg)
    seed_everything(cfg.seed)
    log_dir = '_'.join([cfg.log_dir,
                        cfg.dataset,
                        cfg.model_name,
                        str(cfg.seed)])
    model = VisDynamicsModel(lr=cfg.lr,
                             seed=cfg.seed,
                             if_cuda=cfg.if_cuda,
                             if_test=True,
                             gamma=cfg.gamma,
                             log_dir=log_dir,
                             train_batch=cfg.train_batch,
                             val_batch=cfg.val_batch,
                             test_batch=cfg.test_batch,
                             num_workers=cfg.num_workers,
                             model_name=cfg.model_name,
                             data_filepath=cfg.data_filepath,
                             dataset=cfg.dataset,
                             lr_schedule=cfg.lr_schedule)
    # load model
    if cfg.model_name == 'encoder-decoder' or cfg.model_name == 'encoder-decoder-64':
        # single-model variants: the Lightning state dict loads directly
        checkpoint_filepath = str(sys.argv[3])
        checkpoint_filepath = glob.glob(os.path.join(checkpoint_filepath, '*.ckpt'))[0]
        ckpt = torch.load(checkpoint_filepath)
        model.load_state_dict(ckpt['state_dict'])
    if 'refine' in cfg.model_name:
        # refine variants: load the refine sub-model (argv[4]) and the
        # high-dim sub-model (argv[3]) separately, stripping the Lightning
        # submodule prefixes from each checkpoint's keys first
        checkpoint_filepath = str(sys.argv[4])
        checkpoint_filepath = glob.glob(os.path.join(checkpoint_filepath, '*.ckpt'))[0]
        ckpt = torch.load(checkpoint_filepath)
        ckpt = rename_ckpt_for_multi_models(ckpt)
        model.model.load_state_dict(ckpt)
        high_dim_checkpoint_filepath = str(sys.argv[3])
        high_dim_checkpoint_filepath = glob.glob(os.path.join(high_dim_checkpoint_filepath, '*.ckpt'))[0]
        ckpt = torch.load(high_dim_checkpoint_filepath)
        ckpt = rename_ckpt_for_multi_models(ckpt)
        model.high_dim_model.load_state_dict(ckpt)
    model = model.to('cuda')
    model.eval()
    model.freeze()
    # get all the test video ids
    data_filepath_base = os.path.join(cfg.data_filepath, cfg.dataset)
    with open(os.path.join('../datainfo', cfg.dataset, f'data_split_dict_{cfg.seed}.json'), 'r') as file:
        seq_dict = json.load(file)
    test_vid_ids = seq_dict['test']
    pred_len = int(sys.argv[6])
    long_term_folder = os.path.join(log_dir, 'prediction_long_term', 'model_rollout')
    loss_dict = {}
    if cfg.model_name == 'encoder-decoder' or cfg.model_name == 'encoder-decoder-64':
        for p_vid_idx in tqdm(test_vid_ids):
            vid_filepath = os.path.join(data_filepath_base, str(p_vid_idx))
            total_num_frames = len(os.listdir(vid_filepath))
            suf = os.listdir(vid_filepath)[0].split('.')[-1]
            data = None
            saved_folder = os.path.join(long_term_folder, str(p_vid_idx))
            mkdir(saved_folder)
            loss_lst = []
            for start_frame_idx in range(total_num_frames - 3):
                # frames are consumed two at a time, so only even indices start a step
                if start_frame_idx % 2 != 0:
                    continue
                # take the initial input from ground truth data
                if start_frame_idx % pred_len == 0:
                    data = [get_data(os.path.join(vid_filepath, f'{start_frame_idx}.{suf}')),
                            get_data(os.path.join(vid_filepath, f'{start_frame_idx+1}.{suf}'))]
                    data = (torch.cat(data, 2)).unsqueeze(0)
                # get the target
                target = [get_data(os.path.join(vid_filepath, f'{start_frame_idx+2}.{suf}')),
                          get_data(os.path.join(vid_filepath, f'{start_frame_idx+3}.{suf}'))]
                target = (torch.cat(target, 2)).unsqueeze(0)
                # feed into the model
                if cfg.model_name == 'encoder-decoder':
                    output, latent = model.model(data.cuda())
                if cfg.model_name == 'encoder-decoder-64':
                    output, latent = model.model(data.cuda(), data.cuda(), False)
                # compute loss
                loss_lst.append(float(model.loss_func(output, target.cuda()).cpu().detach().numpy()))
                # save (2', 3'), (4', 5'), ...
                img = tensor_to_img(output[0, :, :, :128])
                img.save(os.path.join(saved_folder, f'{start_frame_idx+2}.{suf}'))
                img = tensor_to_img(output[0, :, :, 128:])
                img.save(os.path.join(saved_folder, f'{start_frame_idx+3}.{suf}'))
                # the output becomes the input data in the next iteration
                data = torch.tensor(output.cpu().detach().numpy()).float()
            loss_dict[p_vid_idx] = loss_lst
        # save the test loss for all the testing videos
        with open(os.path.join(long_term_folder, 'test_loss.json'), 'w') as file:
            json.dump(loss_dict, file, indent=4)
    if 'refine' in cfg.model_name:
        for p_vid_idx in tqdm(test_vid_ids):
            vid_filepath = os.path.join(data_filepath_base, str(p_vid_idx))
            total_num_frames = len(os.listdir(vid_filepath))
            suf = os.listdir(vid_filepath)[0].split('.')[-1]
            data = None
            saved_folder = os.path.join(long_term_folder, str(p_vid_idx))
            mkdir(saved_folder)
            loss_lst = []
            for start_frame_idx in range(total_num_frames - 3):
                if start_frame_idx % 2 != 0:
                    continue
                # take the initial input from ground truth data
                if start_frame_idx % pred_len == 0:
                    data = [get_data(os.path.join(vid_filepath, f'{start_frame_idx}.{suf}')),
                            get_data(os.path.join(vid_filepath, f'{start_frame_idx+1}.{suf}'))]
                    data = (torch.cat(data, 2)).unsqueeze(0)
                # get the target
                target = [get_data(os.path.join(vid_filepath, f'{start_frame_idx+2}.{suf}')),
                          get_data(os.path.join(vid_filepath, f'{start_frame_idx+3}.{suf}'))]
                target = (torch.cat(target, 2)).unsqueeze(0)
                # feed into the model: encode with the high-dim model,
                # pass the latent through the refine model, then decode
                # using the refined latent
                _, latent = model.high_dim_model(data.cuda(), data.cuda(), False)
                latent = latent.squeeze(-1).squeeze(-1)
                latent_reconstructed, latent_latent = model.model(latent)
                output, _ = model.high_dim_model(data.cuda(), latent_reconstructed.unsqueeze(2).unsqueeze(3), True)
                # compute loss
                loss_lst.append(float(model.loss_func(output, target.cuda()).cpu().detach().numpy()))
                # save (2', 3'), (4', 5'), ...
                img = tensor_to_img(output[0, :, :, :128])
                img.save(os.path.join(saved_folder, f'{start_frame_idx+2}.{suf}'))
                img = tensor_to_img(output[0, :, :, 128:])
                img.save(os.path.join(saved_folder, f'{start_frame_idx+3}.{suf}'))
                # the output becomes the input data in the next iteration
                data = torch.tensor(output.cpu().detach().numpy()).float()
            loss_dict[p_vid_idx] = loss_lst
        # save the test loss for all the testing videos
        with open(os.path.join(long_term_folder, 'test_loss.json'), 'w') as file:
            json.dump(loss_dict, file, indent=4)
def model_rollout_hybrid(step):
    """Long-term rollout with the refine model applied only at certain steps.

    Requires a 'refine' model_name in the config. Uses the same CLI layout
    as model_rollout (argv[2]=config, argv[3]=high-dim ckpt dir,
    argv[4]=refine ckpt dir, argv[6]=pred_len). At prediction steps where
    (start_frame_idx + 2) % (2 * step + 2) == 0 the latent is passed
    through the refine model before decoding; otherwise the high-dim
    model rolls out on its own. Outputs go under
    <log_dir>/prediction_long_term/hybrid_rollout_<step>/.
    """
    config_filepath = str(sys.argv[2])
    cfg = load_config(filepath=config_filepath)
    pprint.pprint(cfg)
    cfg = munchify(cfg)
    seed(cfg)
    seed_everything(cfg.seed)
    if 'refine' not in cfg.model_name:
        assert False, "the hybrid scheme is only supported with refine model..."
    log_dir = '_'.join([cfg.log_dir,
                        cfg.dataset,
                        cfg.model_name,
                        str(cfg.seed)])
    model = VisDynamicsModel(lr=cfg.lr,
                             seed=cfg.seed,
                             if_cuda=cfg.if_cuda,
                             if_test=True,
                             gamma=cfg.gamma,
                             log_dir=log_dir,
                             train_batch=cfg.train_batch,
                             val_batch=cfg.val_batch,
                             test_batch=cfg.test_batch,
                             num_workers=cfg.num_workers,
                             model_name=cfg.model_name,
                             data_filepath=cfg.data_filepath,
                             dataset=cfg.dataset,
                             lr_schedule=cfg.lr_schedule)
    # load model: refine sub-model from argv[4], high-dim sub-model from argv[3]
    checkpoint_filepath = str(sys.argv[4])
    checkpoint_filepath = glob.glob(os.path.join(checkpoint_filepath, '*.ckpt'))[0]
    ckpt = torch.load(checkpoint_filepath)
    ckpt = rename_ckpt_for_multi_models(ckpt)
    model.model.load_state_dict(ckpt)
    high_dim_checkpoint_filepath = str(sys.argv[3])
    high_dim_checkpoint_filepath = glob.glob(os.path.join(high_dim_checkpoint_filepath, '*.ckpt'))[0]
    ckpt = torch.load(high_dim_checkpoint_filepath)
    ckpt = rename_ckpt_for_multi_models(ckpt)
    model.high_dim_model.load_state_dict(ckpt)
    model = model.to('cuda')
    model.eval()
    model.freeze()
    # get all the test video ids
    data_filepath_base = os.path.join(cfg.data_filepath, cfg.dataset)
    with open(os.path.join('../datainfo', cfg.dataset, f'data_split_dict_{cfg.seed}.json'), 'r') as file:
        seq_dict = json.load(file)
    test_vid_ids = seq_dict['test']
    pred_len = int(sys.argv[6])
    long_term_folder = os.path.join(log_dir, 'prediction_long_term', f'hybrid_rollout_{step}')
    loss_dict = {}
    for p_vid_idx in tqdm(test_vid_ids):
        vid_filepath = os.path.join(data_filepath_base, str(p_vid_idx))
        total_num_frames = len(os.listdir(vid_filepath))
        suf = os.listdir(vid_filepath)[0].split('.')[-1]
        data = None
        saved_folder = os.path.join(long_term_folder, str(p_vid_idx))
        mkdir(saved_folder)
        loss_lst = []
        for start_frame_idx in range(total_num_frames - 3):
            # frames are consumed two at a time, so only even indices start a step
            if start_frame_idx % 2 != 0:
                continue
            # take the initial input from ground truth data
            if start_frame_idx % pred_len == 0:
                data = [get_data(os.path.join(vid_filepath, f'{start_frame_idx}.{suf}')),
                        get_data(os.path.join(vid_filepath, f'{start_frame_idx+1}.{suf}'))]
                data = (torch.cat(data, 2)).unsqueeze(0)
            # get the target
            target = [get_data(os.path.join(vid_filepath, f'{start_frame_idx+2}.{suf}')),
                      get_data(os.path.join(vid_filepath, f'{start_frame_idx+3}.{suf}'))]
            target = (torch.cat(target, 2)).unsqueeze(0)
            # feed into the model: refine the latent only at the configured
            # cadence, otherwise roll out with the high-dim model alone
            if (start_frame_idx + 2) % (2 * step + 2) == 0:
                _, latent = model.high_dim_model(data.cuda(), data.cuda(), False)
                latent = latent.squeeze(-1).squeeze(-1)
                latent_reconstructed, latent_latent = model.model(latent)
                output, _ = model.high_dim_model(data.cuda(), latent_reconstructed.unsqueeze(2).unsqueeze(3), True)
            else:
                output, _ = model.high_dim_model(data.cuda(), data.cuda(), False)
            # compute loss
            loss_lst.append(float(model.loss_func(output, target.cuda()).cpu().detach().numpy()))
            # save (2', 3'), (4', 5'), ...
            img = tensor_to_img(output[0, :, :, :128])
            img.save(os.path.join(saved_folder, f'{start_frame_idx+2}.{suf}'))
            img = tensor_to_img(output[0, :, :, 128:])
            img.save(os.path.join(saved_folder, f'{start_frame_idx+3}.{suf}'))
            # the output becomes the input data in the next iteration
            data = torch.tensor(output.cpu().detach().numpy()).float()
        loss_dict[p_vid_idx] = loss_lst
    # save the test loss for all the testing videos
    with open(os.path.join(long_term_folder, 'test_loss.json'), 'w') as file:
        json.dump(loss_dict, file, indent=4)
def model_rollout_perturb(perturb_type, perturb_level):
    """Long-term rollout (scheme B) seeded from perturbed ground-truth frames.

    Identical to model_rollout except that whenever the rollout is re-seeded
    from ground truth, the input frames are perturbed via get_data_perturb
    (background replace/cover or white noise, controlled by perturb_type and
    perturb_level), and those perturbed inputs are also saved alongside the
    predictions. Outputs go under
    <log_dir>/prediction_long_term/model_rollout_perturb_<type>_<level>/.
    CLI layout matches model_rollout (argv[2]=config, argv[3]/argv[4]=ckpt
    dirs, argv[6]=pred_len).
    """
    config_filepath = str(sys.argv[2])
    cfg = load_config(filepath=config_filepath)
    pprint.pprint(cfg)
    cfg = munchify(cfg)
    seed(cfg)
    seed_everything(cfg.seed)
    log_dir = '_'.join([cfg.log_dir,
                        cfg.dataset,
                        cfg.model_name,
                        str(cfg.seed)])
    model = VisDynamicsModel(lr=cfg.lr,
                             seed=cfg.seed,
                             if_cuda=cfg.if_cuda,
                             if_test=True,
                             gamma=cfg.gamma,
                             log_dir=log_dir,
                             train_batch=cfg.train_batch,
                             val_batch=cfg.val_batch,
                             test_batch=cfg.test_batch,
                             num_workers=cfg.num_workers,
                             model_name=cfg.model_name,
                             data_filepath=cfg.data_filepath,
                             dataset=cfg.dataset,
                             lr_schedule=cfg.lr_schedule)
    # load model
    if cfg.model_name == 'encoder-decoder' or cfg.model_name == 'encoder-decoder-64':
        checkpoint_filepath = str(sys.argv[3])
        checkpoint_filepath = glob.glob(os.path.join(checkpoint_filepath, '*.ckpt'))[0]
        ckpt = torch.load(checkpoint_filepath)
        model.load_state_dict(ckpt['state_dict'])
    if 'refine' in cfg.model_name:
        # load refine sub-model (argv[4]) and high-dim sub-model (argv[3])
        checkpoint_filepath = str(sys.argv[4])
        checkpoint_filepath = glob.glob(os.path.join(checkpoint_filepath, '*.ckpt'))[0]
        ckpt = torch.load(checkpoint_filepath)
        ckpt = rename_ckpt_for_multi_models(ckpt)
        model.model.load_state_dict(ckpt)
        high_dim_checkpoint_filepath = str(sys.argv[3])
        high_dim_checkpoint_filepath = glob.glob(os.path.join(high_dim_checkpoint_filepath, '*.ckpt'))[0]
        ckpt = torch.load(high_dim_checkpoint_filepath)
        ckpt = rename_ckpt_for_multi_models(ckpt)
        model.high_dim_model.load_state_dict(ckpt)
    model = model.to('cuda')
    model.eval()
    model.freeze()
    # get all the test video ids
    data_filepath_base = os.path.join(cfg.data_filepath, cfg.dataset)
    with open(os.path.join('../datainfo', cfg.dataset, f'data_split_dict_{cfg.seed}.json'), 'r') as file:
        seq_dict = json.load(file)
    test_vid_ids = seq_dict['test']
    pred_len = int(sys.argv[6])
    long_term_folder = os.path.join(log_dir, 'prediction_long_term', f'model_rollout_perturb_{perturb_type}_{perturb_level}')
    loss_dict = {}
    if cfg.model_name == 'encoder-decoder' or cfg.model_name == 'encoder-decoder-64':
        for p_vid_idx in tqdm(test_vid_ids):
            vid_filepath = os.path.join(data_filepath_base, str(p_vid_idx))
            total_num_frames = len(os.listdir(vid_filepath))
            suf = os.listdir(vid_filepath)[0].split('.')[-1]
            data = None
            saved_folder = os.path.join(long_term_folder, str(p_vid_idx))
            mkdir(saved_folder)
            loss_lst = []
            for start_frame_idx in range(total_num_frames - 3):
                # frames are consumed two at a time, so only even indices start a step
                if start_frame_idx % 2 != 0:
                    continue
                # take the initial input from ground truth data
                if start_frame_idx % pred_len == 0:
                    data = [get_data_perturb(os.path.join(vid_filepath, f'{start_frame_idx}.{suf}'), perturb_type, perturb_level),
                            get_data_perturb(os.path.join(vid_filepath, f'{start_frame_idx+1}.{suf}'), perturb_type, perturb_level)]
                    data = (torch.cat(data, 2)).unsqueeze(0)
                    # also save the perturbed inputs for inspection
                    img = tensor_to_img(data[0, :, :, :128])
                    img.save(os.path.join(saved_folder, f'{start_frame_idx}.{suf}'))
                    img = tensor_to_img(data[0, :, :, 128:])
                    img.save(os.path.join(saved_folder, f'{start_frame_idx+1}.{suf}'))
                # get the target (unperturbed ground truth)
                target = [get_data(os.path.join(vid_filepath, f'{start_frame_idx+2}.{suf}')),
                          get_data(os.path.join(vid_filepath, f'{start_frame_idx+3}.{suf}'))]
                target = (torch.cat(target, 2)).unsqueeze(0)
                # feed into the model
                if cfg.model_name == 'encoder-decoder':
                    output, latent = model.model(data.cuda())
                if cfg.model_name == 'encoder-decoder-64':
                    output, latent = model.model(data.cuda(), data.cuda(), False)
                # compute loss
                loss_lst.append(float(model.loss_func(output, target.cuda()).cpu().detach().numpy()))
                # save (2', 3'), (4', 5'), ...
                img = tensor_to_img(output[0, :, :, :128])
                img.save(os.path.join(saved_folder, f'{start_frame_idx+2}.{suf}'))
                img = tensor_to_img(output[0, :, :, 128:])
                img.save(os.path.join(saved_folder, f'{start_frame_idx+3}.{suf}'))
                # the output becomes the input data in the next iteration
                data = torch.tensor(output.cpu().detach().numpy()).float()
            loss_dict[p_vid_idx] = loss_lst
        # save the test loss for all the testing videos
        with open(os.path.join(long_term_folder, 'test_loss.json'), 'w') as file:
            json.dump(loss_dict, file, indent=4)
    if 'refine' in cfg.model_name:
        for p_vid_idx in tqdm(test_vid_ids):
            vid_filepath = os.path.join(data_filepath_base, str(p_vid_idx))
            total_num_frames = len(os.listdir(vid_filepath))
            suf = os.listdir(vid_filepath)[0].split('.')[-1]
            data = None
            saved_folder = os.path.join(long_term_folder, str(p_vid_idx))
            mkdir(saved_folder)
            loss_lst = []
            for start_frame_idx in range(total_num_frames - 3):
                if start_frame_idx % 2 != 0:
                    continue
                # take the initial input from ground truth data
                if start_frame_idx % pred_len == 0:
                    data = [get_data_perturb(os.path.join(vid_filepath, f'{start_frame_idx}.{suf}'), perturb_type, perturb_level),
                            get_data_perturb(os.path.join(vid_filepath, f'{start_frame_idx+1}.{suf}'), perturb_type, perturb_level)]
                    data = (torch.cat(data, 2)).unsqueeze(0)
                    # also save the perturbed inputs for inspection
                    img = tensor_to_img(data[0, :, :, :128])
                    img.save(os.path.join(saved_folder, f'{start_frame_idx}.{suf}'))
                    img = tensor_to_img(data[0, :, :, 128:])
                    img.save(os.path.join(saved_folder, f'{start_frame_idx+1}.{suf}'))
                # get the target (unperturbed ground truth)
                target = [get_data(os.path.join(vid_filepath, f'{start_frame_idx+2}.{suf}')),
                          get_data(os.path.join(vid_filepath, f'{start_frame_idx+3}.{suf}'))]
                target = (torch.cat(target, 2)).unsqueeze(0)
                # feed into the model: encode, refine the latent, decode
                _, latent = model.high_dim_model(data.cuda(), data.cuda(), False)
                latent = latent.squeeze(-1).squeeze(-1)
                latent_reconstructed, latent_latent = model.model(latent)
                output, _ = model.high_dim_model(data.cuda(), latent_reconstructed.unsqueeze(2).unsqueeze(3), True)
                # compute loss
                loss_lst.append(float(model.loss_func(output, target.cuda()).cpu().detach().numpy()))
                # save (2', 3'), (4', 5'), ...
                img = tensor_to_img(output[0, :, :, :128])
                img.save(os.path.join(saved_folder, f'{start_frame_idx+2}.{suf}'))
                img = tensor_to_img(output[0, :, :, 128:])
                img.save(os.path.join(saved_folder, f'{start_frame_idx+3}.{suf}'))
                # the output becomes the input data in the next iteration
                data = torch.tensor(output.cpu().detach().numpy()).float()
            loss_dict[p_vid_idx] = loss_lst
        # save the test loss for all the testing videos
        with open(os.path.join(long_term_folder, 'test_loss.json'), 'w') as file:
            json.dump(loss_dict, file, indent=4)
def rename_ckpt_for_multi_models(ckpt):
    """Strip the Lightning submodule prefix from checkpoint parameter names.

    Keys containing 'high_dim_model' lose the 'high_dim_model.' prefix;
    all other keys lose the 'model.' prefix, so the resulting state dict
    can be loaded directly into the bare submodule.
    """
    stripped = OrderedDict()
    for key, value in ckpt['state_dict'].items():
        prefix = 'high_dim_model.' if 'high_dim_model' in key else 'model.'
        stripped[key.replace(prefix, '')] = value
    return stripped
def get_data(filepath):
    """Load an image file as a float tensor of shape (C, 128, 128) in [0, 1].

    NOTE(review): assumes a 3-channel (RGB) image so that permute(2, 0, 1)
    yields channel-first layout — confirm against the dataset files.
    """
    img = Image.open(filepath).resize((128, 128))
    arr = np.array(img) / 255.0
    return torch.tensor(arr).permute(2, 0, 1).float()
def get_data_perturb(filepath, perturb_type, perturb_level):
    """Load an image like get_data, with a reproducible perturbation applied.

    The perturbed region is the top-left square patch of side
    2**(perturb_level - 1) pixels (after resizing to 128x128).

    perturb_type:
      'background_replace' - recolor pixels in the patch that match the
                             dataset background color
      'background_cover'   - overwrite every pixel in the patch
      'white_noise'        - add Gaussian noise over the whole image
    Any other value leaves the image untouched. The RNG is seeded from the
    parent directory name (presumably the video id — verify against the
    dataset layout) so perturbations are reproducible per video.
    """
    img = Image.open(filepath).resize((128, 128))
    data = np.array(img)
    bg_color = np.array([215, 205, 192])  # dataset-specific background RGB
    rng = np.random.RandomState(int(filepath.split('/')[-2]))
    new_bg_color = rng.randint(256, size=3)
    patch = 2 ** (perturb_level - 1)
    if perturb_type == 'background_replace':
        for row in range(patch):
            for col in range(patch):
                if np.array_equal(data[row, col], bg_color):
                    data[row, col] = new_bg_color
    elif perturb_type == 'background_cover':
        for row in range(patch):
            for col in range(patch):
                data[row, col] = new_bg_color
    elif perturb_type == 'white_noise':
        # noise strength grows with the square of the patch/image side ratio
        sigma = 255.0 * (2 ** (perturb_level - 1) / 128) ** 2
        data = data + rng.normal(0, sigma, data.shape)
    data = torch.tensor(data / 255.0)
    return data.permute(2, 0, 1).float()
# out_tensor: 3 x 128 x 128 -> 128 x 128 x 3
def tensor_to_img(out_tensor):
    """Convert a CHW image tensor to an RGB PIL image."""
    to_pil = transforms.ToPILImage()
    return to_pil(out_tensor).convert("RGB")
if __name__ == '__main__':
    # Dispatch on the prediction scheme passed as the first CLI argument.
    if str(sys.argv[1]) == 'model-rollout':
        model_rollout()
    elif 'hybrid' in str(sys.argv[1]):
        # e.g. 'hybrid-rollout-3' -> refine cadence step = 3
        step = int(sys.argv[1].split('-')[-1])
        model_rollout_hybrid(step)
    elif str(sys.argv[1]) == 'latent-prediction':
        # NOTE(review): latent_prediction is not defined anywhere in this
        # file, so this branch raises NameError as written — presumably it
        # was meant to come from models_latentpred / VisLatentDynamicsModel;
        # confirm and wire it up.
        latent_prediction()
    elif 'perturb' in str(sys.argv[1]):
        # e.g. 'perturb-white_noise-3' -> type 'white_noise', level 3
        perturb_type = str(sys.argv[1].split('-')[-2])
        perturb_level = int(sys.argv[1].split('-')[-1])
        model_rollout_perturb(perturb_type, perturb_level)
    else:
        assert False, "prediction scheme is not supported..."
| 44.538603
| 132
| 0.577737
| 3,171
| 24,229
| 4.183223
| 0.075055
| 0.030305
| 0.049755
| 0.035884
| 0.861138
| 0.85277
| 0.85066
| 0.842518
| 0.838146
| 0.832567
| 0
| 0.019742
| 0.295472
| 24,229
| 544
| 133
| 44.538603
| 0.757352
| 0.080111
| 0
| 0.806846
| 0
| 0
| 0.078967
| 0.04506
| 0
| 0
| 0
| 0
| 0.00489
| 1
| 0.02445
| false
| 0.002445
| 0.046455
| 0.002445
| 0.08313
| 0.012225
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e3aaacc383334438950848972ea1163fedc75530
| 8,923
|
py
|
Python
|
tests/test_linear_model.py
|
hugocool/explainerdashboard
|
e725528c3d94a1a45b51bd9632686d0697274f54
|
[
"MIT"
] | 1,178
|
2019-12-20T10:56:17.000Z
|
2022-03-30T13:05:48.000Z
|
tests/test_linear_model.py
|
MaxCodeXTC/explainerdashboard
|
dfbf9bbafab8b82317cc0437a41e4582b101e081
|
[
"MIT"
] | 172
|
2020-03-04T08:15:01.000Z
|
2022-03-31T20:23:14.000Z
|
tests/test_linear_model.py
|
MaxCodeXTC/explainerdashboard
|
dfbf9bbafab8b82317cc0437a41e4582b101e081
|
[
"MIT"
] | 150
|
2020-03-04T04:43:52.000Z
|
2022-03-29T06:57:00.000Z
|
import unittest
import pandas as pd
import numpy as np
import shap
import plotly.graph_objects as go
from sklearn.linear_model import LinearRegression, LogisticRegression
from explainerdashboard.explainers import RegressionExplainer, ClassifierExplainer
from explainerdashboard.datasets import titanic_fare, titanic_survive, titanic_names
class LinearRegressionTests(unittest.TestCase):
    """Smoke tests for RegressionExplainer backed by a LinearRegression
    model fitted on the titanic fare data, using the linear SHAP explainer
    with grouped categorical features."""

    def setUp(self):
        X_train, y_train, X_test, y_test = titanic_fare()
        self.test_len = len(X_test)
        # titanic_names() returns (train_names, test_names); the original
        # called it twice for the same values — one call serves both the
        # explainer idxs and the index-lookup test.
        _, test_names = titanic_names()
        self.names = test_names
        model = LinearRegression()
        model.fit(X_train, y_train)
        self.explainer = RegressionExplainer(model, X_test, y_test,
                            shap='linear',
                            cats=[{'Gender': ['Sex_female', 'Sex_male', 'Sex_nan']},
                                    'Deck', 'Embarked'],
                            idxs=test_names, units="$")

    def test_explainer_len(self):
        self.assertEqual(len(self.explainer), self.test_len)

    def test_int_idx(self):
        self.assertEqual(self.explainer.get_idx(self.names[0]), 0)

    def test_random_index(self):
        self.assertIsInstance(self.explainer.random_index(), int)
        self.assertIsInstance(self.explainer.random_index(return_str=True), str)

    def test_preds(self):
        self.assertIsInstance(self.explainer.preds, np.ndarray)

    def test_pred_percentiles(self):
        self.assertIsInstance(self.explainer.pred_percentiles(), np.ndarray)

    def test_permutation_importances(self):
        self.assertIsInstance(self.explainer.get_permutation_importances_df(), pd.DataFrame)

    def test_metrics(self):
        self.assertIsInstance(self.explainer.metrics(), dict)
        self.assertIsInstance(self.explainer.metrics_descriptions(), dict)

    def test_mean_abs_shap_df(self):
        self.assertIsInstance(self.explainer.get_mean_abs_shap_df(), pd.DataFrame)

    def test_top_interactions(self):
        self.assertIsInstance(self.explainer.top_shap_interactions("Age"), list)
        self.assertIsInstance(self.explainer.top_shap_interactions("Age", topx=4), list)

    def test_contrib_df(self):
        self.assertIsInstance(self.explainer.get_contrib_df(0), pd.DataFrame)
        self.assertIsInstance(self.explainer.get_contrib_df(0, topx=3), pd.DataFrame)

    def test_shap_base_value(self):
        self.assertIsInstance(self.explainer.shap_base_value(), (np.floating, float))

    def test_shap_values_shape(self):
        self.assertTrue(self.explainer.get_shap_values_df().shape == (len(self.explainer), len(self.explainer.merged_cols)))

    def test_shap_values(self):
        self.assertIsInstance(self.explainer.get_shap_values_df(), pd.DataFrame)

    def test_mean_abs_shap(self):
        self.assertIsInstance(self.explainer.get_mean_abs_shap_df(), pd.DataFrame)

    def test_calculate_properties(self):
        self.explainer.calculate_properties(include_interactions=False)

    def test_pdp_df(self):
        self.assertIsInstance(self.explainer.pdp_df("Age"), pd.DataFrame)
        self.assertIsInstance(self.explainer.pdp_df("Gender"), pd.DataFrame)
        self.assertIsInstance(self.explainer.pdp_df("Deck"), pd.DataFrame)
        self.assertIsInstance(self.explainer.pdp_df("Age", index=0), pd.DataFrame)
        self.assertIsInstance(self.explainer.pdp_df("Gender", index=0), pd.DataFrame)
class LogisticRegressionTests(unittest.TestCase):
    """Smoke tests for ClassifierExplainer backed by a LogisticRegression
    model on the titanic survival data, using the linear SHAP explainer.

    Fix: the original defined `test_metrics` twice; the second definition
    shadowed the first, so `metrics_descriptions()` was never exercised.
    The cutoff variant is now its own test method.
    """

    def setUp(self):
        X_train, y_train, X_test, y_test = titanic_survive()
        # only the test-split names are used (as explainer idxs)
        _, test_names = titanic_names()
        model = LogisticRegression()
        model.fit(X_train, y_train)
        self.explainer = ClassifierExplainer(
                            model, X_test, y_test,
                            shap='linear',
                            cats=['Sex', 'Deck', 'Embarked'],
                            labels=['Not survived', 'Survived'],
                            idxs=test_names)

    def test_preds(self):
        self.assertIsInstance(self.explainer.preds, np.ndarray)

    def test_pred_percentiles(self):
        self.assertIsInstance(self.explainer.pred_percentiles(), np.ndarray)

    def test_columns_ranked_by_shap(self):
        self.assertIsInstance(self.explainer.columns_ranked_by_shap(), list)

    def test_permutation_importances(self):
        self.assertIsInstance(self.explainer.get_permutation_importances_df(), pd.DataFrame)

    def test_metrics(self):
        self.assertIsInstance(self.explainer.metrics(), dict)
        self.assertIsInstance(self.explainer.metrics_descriptions(), dict)

    def test_metrics_cutoff(self):
        # was a duplicate `test_metrics` in the original, which silently
        # replaced the method above
        self.assertIsInstance(self.explainer.metrics(), dict)
        self.assertIsInstance(self.explainer.metrics(cutoff=0.9), dict)

    def test_mean_abs_shap_df(self):
        self.assertIsInstance(self.explainer.get_mean_abs_shap_df(), pd.DataFrame)

    def test_contrib_df(self):
        self.assertIsInstance(self.explainer.get_contrib_df(0), pd.DataFrame)
        self.assertIsInstance(self.explainer.get_contrib_df(0, topx=3), pd.DataFrame)

    def test_shap_base_value(self):
        self.assertIsInstance(self.explainer.shap_base_value(), (np.floating, float))

    def test_shap_values_shape(self):
        self.assertTrue(self.explainer.get_shap_values_df().shape == (len(self.explainer), len(self.explainer.merged_cols)))

    def test_shap_values(self):
        self.assertIsInstance(self.explainer.get_shap_values_df(), pd.DataFrame)

    def test_mean_abs_shap(self):
        self.assertIsInstance(self.explainer.get_mean_abs_shap_df(), pd.DataFrame)

    def test_calculate_properties(self):
        self.explainer.calculate_properties(include_interactions=False)

    def test_pdp_df(self):
        self.assertIsInstance(self.explainer.pdp_df("Age"), pd.DataFrame)
        self.assertIsInstance(self.explainer.pdp_df("Sex"), pd.DataFrame)
        self.assertIsInstance(self.explainer.pdp_df("Deck"), pd.DataFrame)
        self.assertIsInstance(self.explainer.pdp_df("Age", index=0), pd.DataFrame)
        self.assertIsInstance(self.explainer.pdp_df("Sex", index=0), pd.DataFrame)

    def test_pos_label(self):
        self.explainer.pos_label = 1
        self.explainer.pos_label = "Not survived"
        self.assertIsInstance(self.explainer.pos_label, int)
        self.assertIsInstance(self.explainer.pos_label_str, str)
        self.assertEqual(self.explainer.pos_label, 0)
        self.assertEqual(self.explainer.pos_label_str, "Not survived")

    def test_pred_probas(self):
        self.assertIsInstance(self.explainer.pred_probas(), np.ndarray)

    def test_precision_df(self):
        self.assertIsInstance(self.explainer.get_precision_df(), pd.DataFrame)
        self.assertIsInstance(self.explainer.get_precision_df(multiclass=True), pd.DataFrame)
        self.assertIsInstance(self.explainer.get_precision_df(quantiles=4), pd.DataFrame)

    def test_lift_curve_df(self):
        self.assertIsInstance(self.explainer.get_liftcurve_df(), pd.DataFrame)
class LogisticRegressionKernelTests(unittest.TestCase):
    """Kernel-SHAP smoke test for ClassifierExplainer on a 20-row sample
    with a small background set."""

    def setUp(self):
        X_train, y_train, X_test, y_test = titanic_survive()
        _train_names, _test_names = titanic_names()
        clf = LogisticRegression()
        clf.fit(X_train, y_train)
        self.explainer = ClassifierExplainer(
            clf, X_test.iloc[:20], y_test.iloc[:20],
            shap='kernel', model_output='probability',
            X_background=shap.sample(X_train, 5),
            cats=[{'Gender': ['Sex_female', 'Sex_male', 'Sex_nan']},
                  'Deck', 'Embarked'],
            labels=['Not survived', 'Survived'])

    def test_shap_values(self):
        explainer = self.explainer
        self.assertIsInstance(explainer.shap_base_value(), (np.floating, float))
        shap_df = explainer.get_shap_values_df()
        self.assertTrue(shap_df.shape == (len(explainer), len(explainer.merged_cols)))
        self.assertIsInstance(shap_df, pd.DataFrame)
class LinearRegressionKernelTests(unittest.TestCase):
    """Kernel-SHAP smoke test for RegressionExplainer on a 20-row sample
    with a small background set."""

    def setUp(self):
        X_train, y_train, X_test, y_test = titanic_fare()
        self.test_len = len(X_test)
        reg = LinearRegression().fit(X_train, y_train)
        self.explainer = RegressionExplainer(
            reg, X_test.iloc[:20], y_test.iloc[:20], shap='kernel',
            X_background=shap.sample(X_train, 5))

    def test_shap_values(self):
        explainer = self.explainer
        self.assertIsInstance(explainer.shap_base_value(), (np.floating, float))
        shap_df = explainer.get_shap_values_df()
        self.assertTrue(shap_df.shape == (len(explainer), len(explainer.merged_cols)))
        self.assertIsInstance(shap_df, pd.DataFrame)
| 42.899038
| 124
| 0.684747
| 1,059
| 8,923
| 5.522191
| 0.118036
| 0.164501
| 0.205198
| 0.282148
| 0.826436
| 0.823016
| 0.75171
| 0.725889
| 0.70041
| 0.677497
| 0
| 0.00394
| 0.203519
| 8,923
| 207
| 125
| 43.10628
| 0.818911
| 0
| 0
| 0.609272
| 0
| 0
| 0.027457
| 0
| 0
| 0
| 0
| 0
| 0.384106
| 1
| 0.264901
| false
| 0
| 0.07947
| 0
| 0.370861
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.