hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
2672e09e16970b490a70f003cb1d596e6d20b941
| 148
|
py
|
Python
|
venv/Lib/site-packages/IPython/utils/jsonutil.py
|
ajayiagbebaku/NFL-Model
|
afcc67a85ca7138c58c3334d45988ada2da158ed
|
[
"MIT"
] | 6,989
|
2017-07-18T06:23:18.000Z
|
2022-03-31T15:58:36.000Z
|
venv/Lib/site-packages/IPython/utils/jsonutil.py
|
ajayiagbebaku/NFL-Model
|
afcc67a85ca7138c58c3334d45988ada2da158ed
|
[
"MIT"
] | 1,978
|
2017-07-18T09:17:58.000Z
|
2022-03-31T14:28:43.000Z
|
venv/Lib/site-packages/IPython/utils/jsonutil.py
|
ajayiagbebaku/NFL-Model
|
afcc67a85ca7138c58c3334d45988ada2da158ed
|
[
"MIT"
] | 1,228
|
2017-07-18T09:03:13.000Z
|
2022-03-29T05:57:40.000Z
|
from warnings import warn
warn("IPython.utils.jsonutil has moved to jupyter_client.jsonutil", stacklevel=2)
from jupyter_client.jsonutil import *
| 24.666667
| 81
| 0.817568
| 21
| 148
| 5.666667
| 0.666667
| 0.218487
| 0.352941
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007576
| 0.108108
| 148
| 5
| 82
| 29.6
| 0.893939
| 0
| 0
| 0
| 0
| 0
| 0.398649
| 0.304054
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
cd1eb116bcef1822402f413d70d4f7ef09472efd
| 3,763
|
py
|
Python
|
carbondesign/tests/test_date_picker.py
|
dozymoe/django-carbondesign
|
34aed0cfdccfa90fcb5bf2bbd347229815f1417b
|
[
"MIT"
] | null | null | null |
carbondesign/tests/test_date_picker.py
|
dozymoe/django-carbondesign
|
34aed0cfdccfa90fcb5bf2bbd347229815f1417b
|
[
"MIT"
] | null | null | null |
carbondesign/tests/test_date_picker.py
|
dozymoe/django-carbondesign
|
34aed0cfdccfa90fcb5bf2bbd347229815f1417b
|
[
"MIT"
] | null | null | null |
# pylint:disable=missing-module-docstring,missing-class-docstring,missing-function-docstring
from .base import compare_template, SimpleTestCase
class DatePickerTest(SimpleTestCase):
maxDiff = None
def test_rendered(self):
template = """
{% load carbondesign %}
{% DatePicker form.started_at %}
"""
expected = r"""
<div class="bx--form-item">
<div data-date-picker data-date-picker-type="single"
class="bx--date-picker bx--date-picker--single">
<div class="bx--date-picker-container">
<label for="id_started_at" class="bx--label">
Started at
</label>
<div class="bx--date-picker-input__wrapper">
<input type="text" name="started_at" value="2022-02-03 01:02:03" class="bx--date-picker__input" data-date-picker-input="" pattern="\d{1,2}/\d{1,2}/\d{4,4}" placeholder="mm/dd/yyyy" required id="id_started_at">
<svg focusable="false" preserveAspectRatio="xMidYMid meet"
xmlns="http://www.w3.org/2000/svg" fill="currentColor"
data-date-picker-icon="true" class="bx--date-picker__icon"
width="16" height="16" viewBox="0 0 32 32" aria-hidden="true">
<path d="M26,4h-4V2h-2v2h-8V2h-2v2H6C4.9,4,4,4.9,4,6v20c0,1.1,0.9,2,2,2h20c1.1,0,2-0.9,2-2V6C28,4.9,27.1,4,26,4z M26,26H6V12h20 V26z M26,10H6V6h4v2h2V6h8v2h2V6h4V10z"></path>
</svg>
</div>
</div>
</div>
</div>
"""
rendered = compare_template(template, expected)
self.assertEqual(*rendered)
class RangeDatePickerTest(SimpleTestCase):
maxDiff = None
def test_rendered(self):
template = """
{% load carbondesign %}
{% RangeDatePicker form.started_at form.stopped_at %}
"""
expected = r"""
<div class="bx--form-item">
<div data-date-picker data-date-picker-type="range"
class="bx--date-picker bx--date-picker--range">
<div class="bx--date-picker-container">
<label for="id_started_at" class="bx--label">
Started at
</label>
<div class="bx--date-picker-input__wrapper">
<input type="text" name="started_at" value="2022-02-03 01:02:03" class="bx--date-picker__input" pattern="\d{1,2}/\d{1,2}/\d{4,4}" placeholder="mm/dd/yyyy" data-date-picker-input-from="" required id="id_started_at">
<svg focusable="false" preserveAspectRatio="xMidYMid meet"
xmlns="http://www.w3.org/2000/svg" fill="currentColor"
data-date-picker-icon="true" class="bx--date-picker__icon"
width="16" height="16" viewBox="0 0 32 32" aria-hidden="true">
<path d="M26,4h-4V2h-2v2h-8V2h-2v2H6C4.9,4,4,4.9,4,6v20c0,1.1,0.9,2,2,2h20c1.1,0,2-0.9,2-2V6C28,4.9,27.1,4,26,4z M26,26H6V12h20 V26z M26,10H6V6h4v2h2V6h8v2h2V6h4V10z"></path>
</svg>
</div>
</div>
<div class="bx--date-picker-container">
<label for="id_stopped_at" class="bx--label">
Stopped at
</label>
<div class="bx--date-picker-input__wrapper">
<input type="text" name="stopped_at" value="2022-10-04 11:30:40" class="bx--date-picker__input" pattern="\d{1,2}/\d{1,2}/\d{4,4}" placeholder="mm/dd/yyyy" data-date-picker-input-to="" required id="id_stopped_at">
<svg focusable="false" preserveAspectRatio="xMidYMid meet"
xmlns="http://www.w3.org/2000/svg" fill="currentColor"
data-date-picker-icon="true" class="bx--date-picker__icon"
width="16" height="16" viewBox="0 0 32 32" aria-hidden="true">
<path d="M26,4h-4V2h-2v2h-8V2h-2v2H6C4.9,4,4,4.9,4,6v20c0,1.1,0.9,2,2,2h20c1.1,0,2-0.9,2-2V6C28,4.9,27.1,4,26,4z M26,26H6V12h20 V26z M26,10H6V6h4v2h2V6h8v2h2V6h4V10z"></path>
</svg>
</div>
</div>
</div>
</div>
"""
rendered = compare_template(template, expected)
self.assertEqual(*rendered)
| 45.890244
| 222
| 0.644167
| 563
| 3,763
| 4.232682
| 0.197158
| 0.109106
| 0.080571
| 0.099874
| 0.86026
| 0.86026
| 0.86026
| 0.835921
| 0.835921
| 0.820814
| 0
| 0.110259
| 0.168483
| 3,763
| 81
| 223
| 46.45679
| 0.651326
| 0.023917
| 0
| 0.813333
| 0
| 0.12
| 0.85726
| 0.397167
| 0
| 0
| 0
| 0
| 0.026667
| 1
| 0.026667
| false
| 0
| 0.013333
| 0
| 0.093333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
cd30d123e9854a2b2d38e03cc6e2ff717596032d
| 25,267
|
py
|
Python
|
assemblyline/al_ui/apiv3/alert.py
|
dendisuhubdy/grokmachine
|
120a21a25c2730ed356739231ec8b99fc0575c8b
|
[
"BSD-3-Clause"
] | 46
|
2017-05-15T11:15:08.000Z
|
2018-07-02T03:32:52.000Z
|
assemblyline/al_ui/apiv3/alert.py
|
dendisuhubdy/grokmachine
|
120a21a25c2730ed356739231ec8b99fc0575c8b
|
[
"BSD-3-Clause"
] | null | null | null |
assemblyline/al_ui/apiv3/alert.py
|
dendisuhubdy/grokmachine
|
120a21a25c2730ed356739231ec8b99fc0575c8b
|
[
"BSD-3-Clause"
] | 24
|
2017-05-17T03:26:17.000Z
|
2018-07-09T07:00:50.000Z
|
from flask import request
from assemblyline.al.common import forge, queue
from assemblyline.al.core.datastore import SearchException
from al_ui.apiv3 import core
from al_ui.config import STORAGE
from al_ui.api_base import api_login, make_api_response
from riak import RiakError
DATABASE_NUM = 4
SUB_API = 'alert'
QUEUE_PRIORITY = -2
Classification = forge.get_classification()
alert_api = core.make_subapi_blueprint(SUB_API)
alert_api._doc = "Perform operations on alerts"
@alert_api.route("/<alert_key>/", methods=["GET"])
@api_login(required_priv=['R'])
def get_alert(alert_key, **kwargs):
"""
Get the alert details for a given alert key
Variables:
alert_key => Alert key to get the details for
Arguments:
None
Data Block:
None
API call example:
/api/v3/alert/1234567890/
Result example:
{
KEY: VALUE, # All fields of an alert in key/value pair
}
"""
user = kwargs['user']
data = STORAGE.get_alert(alert_key)
if user and data and Classification.is_accessible(user['classification'], data['classification']):
return make_api_response(data)
else:
return make_api_response("", "You are not allowed to see this alert...", 403)
@alert_api.route("/statistics/", methods=["GET"])
@api_login()
def alerts_statistics(**kwargs):
"""
Load facet statistics for the alerts matching the query.
Variables:
None
Arguments:
start_time => Time offset at which to list alerts
time_slice => Length after the start time that we query
filter => Filter to apply to the alert list
fq => Post filter queries (you can have multiple of those)
Data Block:
None
Result example:
"""
user = kwargs['user']
query = request.args.get('filter', "*")
if not query:
query = "*"
start_time = request.args.get('start_time', None)
time_slice = request.args.get('time_slice', None)
filter_queries = [x for x in request.args.getlist("fq") if x != ""]
try:
return make_api_response(STORAGE.get_alert_statistics(query, access_control=user['access_control'],
fq_list=filter_queries,
start_time=start_time,
time_slice=time_slice))
except SearchException:
return make_api_response("", "The specified search query is not valid.", 400)
except RiakError, e:
if e.value == "Query unsuccessful check the logs.":
return make_api_response("", "The specified search query is not valid.", 400)
else:
raise
@alert_api.route("/labels/", methods=["GET"])
@api_login()
def alerts_labels(**kwargs):
"""
Run a facet search to find the different labels matching the query.
Variables:
None
Arguments:
start_time => Time offset at which to list alerts
time_slice => Length after the start time that we query
filter => Filter to apply to the alert list
fq => Post filter queries (you can have multiple of those)
Data Block:
None
Result example:
"""
user = kwargs['user']
query = request.args.get('filter', "*")
if not query:
query = "*"
start_time = request.args.get('start_time', None)
time_slice = request.args.get('time_slice', None)
filter_queries = [x for x in request.args.getlist("fq") if x != ""]
try:
return make_api_response(STORAGE.get_alert_statistics(query, access_control=user['access_control'],
fq_list=filter_queries,
start_time=start_time,
time_slice=time_slice,
field_list=['label']).get('label', []))
except SearchException:
return make_api_response("", "The specified search query is not valid.", 400)
except RiakError, e:
if e.value == "Query unsuccessful check the logs.":
return make_api_response("", "The specified search query is not valid.", 400)
else:
raise
@alert_api.route("/priorities/", methods=["GET"])
@api_login()
def alerts_priorities(**kwargs):
"""
Run a facet search to find the different priorities matching the query.
Variables:
None
Arguments:
start_time => Time offset at which to list alerts
time_slice => Length after the start time that we query
filter => Filter to apply to the alert list
fq => Post filter queries (you can have multiple of those)
Data Block:
None
Result example:
"""
user = kwargs['user']
query = request.args.get('filter', "*")
if not query:
query = "*"
start_time = request.args.get('start_time', None)
time_slice = request.args.get('time_slice', None)
filter_queries = [x for x in request.args.getlist("fq") if x != ""]
try:
return make_api_response(STORAGE.get_alert_statistics(query, access_control=user['access_control'],
fq_list=filter_queries,
start_time=start_time,
time_slice=time_slice,
field_list=['priority']).get('priority', []))
except SearchException:
return make_api_response("", "The specified search query is not valid.", 400)
except RiakError, e:
if e.value == "Query unsuccessful check the logs.":
return make_api_response("", "The specified search query is not valid.", 400)
else:
raise
@alert_api.route("/statuses/", methods=["GET"])
@api_login()
def alerts_statuses(**kwargs):
"""
Run a facet search to find the different statuses matching the query.
Variables:
None
Arguments:
start_time => Time offset at which to list alerts
time_slice => Length after the start time that we query
filter => Filter to apply to the alert list
fq => Post filter queries (you can have multiple of those)
Data Block:
None
Result example:
"""
user = kwargs['user']
query = request.args.get('filter', "*")
if not query:
query = "*"
start_time = request.args.get('start_time', None)
time_slice = request.args.get('time_slice', None)
filter_queries = [x for x in request.args.getlist("fq") if x != ""]
try:
return make_api_response(STORAGE.get_alert_statistics(query, access_control=user['access_control'],
fq_list=filter_queries,
start_time=start_time,
time_slice=time_slice,
field_list=['status']).get('status', []))
except SearchException:
return make_api_response("", "The specified search query is not valid.", 400)
except RiakError, e:
if e.value == "Query unsuccessful check the logs.":
return make_api_response("", "The specified search query is not valid.", 400)
else:
raise
@alert_api.route("/list/", methods=["GET"])
@api_login(required_priv=['R'])
def list_alerts(**kwargs):
"""
List all alert in the system (per page)
Variables:
None
Arguments:
start_time => Time offset at which to list alerts
time_slice => Length after the start time that we query
offset => Offset at which we start giving alerts
length => Numbers of alerts to return
filter => Filter to apply to the alert list
fq => Post filter queries (you can have multiple of those)
Data Block:
None
API call example:
/api/v3/alert/list/
Result example:
{"total": 201, # Total alerts found
"offset": 0, # Offset in the alert list
"count": 100, # Number of alerts returned
"items": [] # List of alert blocks
}
"""
user = kwargs['user']
offset = int(request.args.get('offset', 0))
length = int(request.args.get('length', 100))
query = request.args.get('filter', "*")
if not query:
query = "*"
start_time = request.args.get('start_time', None)
time_slice = request.args.get('time_slice', None)
filter_queries = [x for x in request.args.getlist("fq") if x != ""]
try:
return make_api_response(STORAGE.list_alerts(query, start=offset, rows=length,
access_control=user['access_control'],
fq_list=filter_queries,
start_time=start_time,
time_slice=time_slice))
except SearchException:
return make_api_response("", "The specified search query is not valid.", 400)
except RiakError, e:
if e.value == "Query unsuccessful check the logs.":
return make_api_response("", "The specified search query is not valid.", 400)
else:
raise
@alert_api.route("/grouped/<field>/", methods=["GET"])
@api_login(required_priv=['R'])
def list_grouped_alerts(field, **kwargs):
"""
List all alert grouped by a given field
Variables:
None
Arguments:
start_time => Time offset at which to list alerts
time_slice => Length after the start time that we query
offset => Offset at which we start giving alerts
length => Numbers of alerts to return
filter => Filter to apply to the alert list
fq => Post filter queries (you can have multiple of those)
Data Block:
None
API call example:
/api/v3/alert/grouped/start_time/
Result example:
{"total": 201, # Total alerts found
"offset": 0, # Offset in the alert list
"count": 100, # Number of alerts returned
"items": [], # List of alert blocks
"start_time": "2015-05..." # UTC timestamp for future query (ISO Format)
}
"""
user = kwargs['user']
offset = int(request.args.get('offset', 0))
length = int(request.args.get('length', 100))
query = request.args.get('filter', "*")
if not query:
query = "*"
start_time = request.args.get('start_time', None)
time_slice = request.args.get('time_slice', None)
filter_queries = [x for x in request.args.getlist("fq") if x != ""]
try:
return make_api_response(STORAGE.list_grouped_alerts(query, field, start=offset, rows=length,
start_time=start_time,
time_slice=time_slice,
access_control=user['access_control'],
fq_list=filter_queries,
time_offset=-300.0))
except SearchException:
return make_api_response("", "The specified search query is not valid.", 400)
except RiakError, e:
if e.value == "Query unsuccessful check the logs.":
return make_api_response("", "The specified search query is not valid.", 400)
else:
raise
@alert_api.route("/label/<alert_id>/<labels>/", methods=["GET"])
@api_login(required_priv=['W'])
def add_labels(alert_id, labels, **kwargs):
"""
Add one or multiple labels to a given alert
Variables:
alert_id => ID of the alert to add the label to
labels => List of labels to add as comma separated string
Arguments:
None
Data Block:
None
API call example:
/api/v3/alert/label/1234567890/EMAIL/
Result example:
{"success": true,
"event_id": 0}
"""
user = kwargs['user']
labels = set(labels.upper().split(","))
alert = STORAGE.get_alert(alert_id)
if not alert:
return make_api_response({"success": False, "event_id": None},
err="Alert ID %s not found" % alert_id,
status_code=404)
if not Classification.is_accessible(user['classification'], alert['classification']):
return make_api_response("", "You are not allowed to see this alert...", 403)
cur_label = set(alert.get('label', []))
if labels.difference(labels.intersection(cur_label)):
cur_label = cur_label.union(labels)
alert['label'] = list(cur_label)
STORAGE.save_alert(alert_id, alert)
return make_api_response({"success": True})
else:
return make_api_response({"success": False},
err="Alert already has labels %s" % ", ".join(labels),
status_code=403)
@alert_api.route("/label/batch/<labels>/", methods=["GET"])
@api_login()
def add_labels_by_batch(labels, **kwargs):
"""
Apply labels to all alerts matching the given filters using a background process
Variables:
labels => List of labels to add as comma separated string
Arguments:
q => Main query to filter the data [REQUIRED]
tc => Time constraint to apply to the search
start => Time at which to start the days constraint
fq => Filter query applied to the data
Data Block:
None
API call example:
/api/v3/alert/label/batch/EMAIL/?q=protocol:SMTP
Result example:
{ "status": "QUEUED" }
"""
action_queue = queue.PriorityQueue('alert-actions', db=DATABASE_NUM)
labels = set(labels.upper().split(","))
user = kwargs['user']
q = request.args.get('q', None)
fq = request.args.getlist('fq')
if not q and not fq:
return make_api_response({"success": False,
"event_id": None},
err="You need to at least provide a query to filter the data", status_code=400)
if not q:
q = fq.pop(0)
tc = request.args.get('tc', None)
start = request.args.get('start', None)
msg = {
"user": user['uname'],
"action": "batch_workflow",
"query": q,
"tc": tc,
"start": start,
"fq": fq,
"label": list(labels),
"queue_priority": QUEUE_PRIORITY
}
action_queue.push(QUEUE_PRIORITY, msg)
return make_api_response({"status": "QUEUED"})
@alert_api.route("/priority/<alert_id>/<priority>/", methods=["GET"])
@api_login(required_priv=['W'])
def change_priority(alert_id, priority, **kwargs):
"""
Change the priority of a given alert
Variables:
alert_id => ID of the alert to change the priority
priority => New priority for the alert
Arguments:
None
Data Block:
None
API call example:
/api/v3/alert/priority/1234567890/MALICIOUS/
Result example:
{"success": true,
"event_id": 0}
"""
user = kwargs['user']
priority = priority.upper()
alert = STORAGE.get_alert(alert_id)
if not alert:
return make_api_response({"success": False, "event_id": None},
err="Alert ID %s not found" % alert_id,
status_code=404)
if not Classification.is_accessible(user['classification'], alert['classification']):
return make_api_response("", "You are not allowed to see this alert...", 403)
if priority != alert.get('priority', None):
alert['priority'] = priority
STORAGE.save_alert(alert_id, alert)
return make_api_response({"success": True})
else:
return make_api_response({"success": False},
err="Alert already has priority %s" % priority,
status_code=403)
@alert_api.route("/priority/batch/<priority>/", methods=["GET"])
@api_login()
def change_priority_by_batch(priority, **kwargs):
"""
Apply priority to all alerts matching the given filters using a background process
Variables:
priority => priority to apply
Arguments:
q => Main query to filter the data [REQUIRED]
tc => Time constraint to apply to the search
start => Time at which to start the days constraint
fq => Filter query applied to the data
Data Block:
None
API call example:
/api/v3/alert/priority/batch/HIGH/?q=al_av:*
Result example:
{"status": "QUEUED"}
"""
action_queue = queue.PriorityQueue('alert-actions', db=DATABASE_NUM)
priority = priority.upper()
user = kwargs['user']
q = request.args.get('q', None)
fq = request.args.getlist('fq')
if not q and not fq:
return make_api_response({"success": False,
"event_id": None},
err="You need to at least provide a query to filter the data", status_code=400)
if not q:
q = fq.pop(0)
tc = request.args.get('tc', None)
start = request.args.get('start', None)
msg = {
"user": user['uname'],
"action": "batch_workflow",
"query": q,
"tc": tc,
"start": start,
"fq": fq,
"priority": priority,
"queue_priority": QUEUE_PRIORITY
}
action_queue.push(QUEUE_PRIORITY, msg)
return make_api_response({"status": "QUEUED"})
@alert_api.route("/status/<alert_id>/<status>/", methods=["GET"])
@api_login(required_priv=['W'])
def change_status(alert_id, status, **kwargs):
"""
Change the status of a given alert
Variables:
alert_id => ID of the alert to change the status
status => New status for the alert
Arguments:
None
Data Block:
None
API call example:
/api/v3/alert/status/1234567890/MALICIOUS/
Result example:
{"success": true,
"event_id": 0}
"""
user = kwargs['user']
status = status.upper()
alert = STORAGE.get_alert(alert_id)
if not alert:
return make_api_response({"success": False, "event_id": None},
err="Alert ID %s not found" % alert_id,
status_code=404)
if not Classification.is_accessible(user['classification'], alert['classification']):
return make_api_response("", "You are not allowed to see this alert...", 403)
if status != alert.get('status', None):
alert['status'] = status
STORAGE.save_alert(alert_id, alert)
return make_api_response({"success": True})
else:
return make_api_response({"success": False},
err="Alert already has status %s" % status,
status_code=403)
@alert_api.route("/status/batch/<status>/", methods=["GET"])
@api_login()
def change_status_by_batch(status, **kwargs):
"""
Apply status to all alerts matching the given filters using a background process
Variables:
status => Status to apply
Arguments:
q => Main query to filter the data [REQUIRED]
tc => Time constraint to apply to the search
start => Time at which to start the days constraint
fq => Filter query applied to the data
Data Block:
None
API call example:
/api/v3/alert/status/batch/MALICIOUS/?q=al_av:*
Result example:
{"status": "QUEUED"}
"""
action_queue = queue.PriorityQueue('alert-actions', db=DATABASE_NUM)
status = status.upper()
user = kwargs['user']
q = request.args.get('q', None)
fq = request.args.getlist('fq')
if not q and not fq:
return make_api_response({"success": False,
"event_id": None},
err="You need to at least provide a query to filter the data", status_code=400)
if not q:
q = fq.pop(0)
tc = request.args.get('tc', None)
start = request.args.get('start', None)
msg = {
"user": user['uname'],
"action": "batch_workflow",
"query": q,
"tc": tc,
"start": start,
"fq": fq,
"status": status,
"queue_priority": QUEUE_PRIORITY
}
action_queue.push(QUEUE_PRIORITY, msg)
return make_api_response({"status": "QUEUED"})
@alert_api.route("/ownership/<alert_id>/", methods=["GET"])
@api_login(required_priv=['W'])
def take_ownership(alert_id, **kwargs):
"""
Take ownership of a given alert
Variables:
alert_id => ID of the alert to send to take ownership
Arguments:
None
Data Block:
None
API call example:
/api/v3/alert/ownership/1234567890/
Result example:
{"success": true}
"""
user = kwargs['user']
alert = STORAGE.get_alert(alert_id)
if not alert:
return make_api_response({"success": False},
err="Alert ID %s not found" % alert_id,
status_code=404)
if not Classification.is_accessible(user['classification'], alert['classification']):
return make_api_response({"success": False}, "You are not allowed to see this alert...", 403)
if alert.get('owner', None) is None:
alert.update({"owner": user['uname']})
STORAGE.save_alert(alert_id, alert)
return make_api_response({"success": True})
else:
return make_api_response({"success": False},
err="Alert is already owned by %s" % alert['owner'],
status_code=403)
@alert_api.route("/ownership/batch/", methods=["GET"])
@api_login()
def take_ownership_by_batch(**kwargs):
"""
Take ownership of all alerts matching the given filters using a background process
Variables:
None
Arguments:
q => Main query to filter the data [REQUIRED]
tc => Time constraint to apply to the search
start => Time at which to start the days constraint
fq => Filter query applied to the data
Data Block:
None
API call example:
/api/v3/alert/ownership/batch/?q=event_id:"helloworld"
Result example:
{ "success": true }
"""
action_queue = queue.PriorityQueue('alert-actions', db=DATABASE_NUM)
user = kwargs['user']
q = request.args.get('q', None)
fq = request.args.getlist('fq')
if not q and not fq:
return make_api_response({"success": False,
"event_id": None},
err="You need to at least provide a query to filter the data", status_code=400)
if not q:
q = fq.pop(0)
tc = request.args.get('tc', None)
start = request.args.get('start', None)
msg = {
"user": user['uname'],
"action": "ownership",
"query": q,
"tc": tc,
"start": start,
"fq": fq,
"queue_priority": QUEUE_PRIORITY
}
action_queue.push(QUEUE_PRIORITY, msg)
return make_api_response({"status": "QUEUED"})
@alert_api.route("/related/", methods=["GET"])
@api_login()
def find_related_alert_ids(**kwargs):
"""
Return the list of all IDs related to the currently selected query
Variables:
None
Arguments:
q => Main query to filter the data [REQUIRED]
tc => Time constraint to apply to the search
start => Time at which to start the days constraint
fq => Filter query applied to the data
Data Block:
None
API call example:
/api/v3/alert/related/?q=event_id:1
Result example:
["1"]
"""
user = kwargs['user']
q = request.args.get('q', None)
fq = request.args.getlist('fq')
if not q and not fq:
return make_api_response({"success": False,
"event_id": None},
err="You need to at least provide a query to filter the data", status_code=400)
if not q:
q = fq.pop(0)
tc = request.args.get('tc', None)
stime = request.args.get('start', None)
fq_list = []
if tc is not None and tc != "":
if stime is not None:
fq_list.append("reporting_ts:[%s-%s TO %s]" % (stime, tc, stime))
else:
fq_list.append("reporting_ts:[NOW-%s TO NOW]" % tc)
elif stime is not None and stime != "":
fq_list.append("reporting_ts:[* TO %s]" % stime)
if fq:
if isinstance(fq, list):
fq_list.extend(fq)
elif fq != "":
fq_list.append(fq)
return make_api_response([x['event_id'] for x in
STORAGE.stream_search('alert', q, fq=fq_list, access_control=user['access_control'])])
| 31.58375
| 116
| 0.572446
| 3,052
| 25,267
| 4.608781
| 0.079292
| 0.037537
| 0.050121
| 0.068676
| 0.81857
| 0.793403
| 0.771292
| 0.771292
| 0.759775
| 0.733044
| 0
| 0.011414
| 0.316896
| 25,267
| 799
| 117
| 31.623279
| 0.803534
| 0
| 0
| 0.760417
| 0
| 0
| 0.170993
| 0.010303
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.018229
| null | null | 0.002604
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
cd3b0c5fa6fb95873e7b5d877b0eead3532f6137
| 23,178
|
py
|
Python
|
DENT/utils/pgd_attack.py
|
jfc43/eval-transductive-robustness
|
91aea64cc69be1e3f4d14f94de9ff976c8c307df
|
[
"Apache-2.0"
] | null | null | null |
DENT/utils/pgd_attack.py
|
jfc43/eval-transductive-robustness
|
91aea64cc69be1e3f4d14f94de9ff976c8c307df
|
[
"Apache-2.0"
] | null | null | null |
DENT/utils/pgd_attack.py
|
jfc43/eval-transductive-robustness
|
91aea64cc69be1e3f4d14f94de9ff976c8c307df
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import print_function
import torch
import torch.nn as nn
import numpy as np
import utils.torch
class LinfConfPGDAttack:
    """
    Confident PGD attack with order=Linf.

    Maximizes a confidence-margin loss (true-class logit vs. strongest
    competing logit) instead of plain cross-entropy.

    :param model: callable returning logits for a batch of inputs.
    :param eps: maximum Linf distortion.
    :param nb_iter: number of PGD iterations.
    :param eps_iter: attack step size.
    :param rand_init: (optional bool) start from a random point in the eps-ball.
    :param clip_min: minimum value per input dimension.
    :param clip_max: maximum value per input dimension.
    :param targeted: if the attack is targeted.
    :param num_classes: number of classes (used to enumerate targeted runs).
    :param elementwise_best: keep the per-example best perturbation seen
        across iterations instead of the final iterate.
    :param num_rand_init: number of independent random restarts.
    """
    def __init__(
            self, model, eps=0.1, nb_iter=100,
            eps_iter=0.01, rand_init=True, clip_min=0., clip_max=1.,
            targeted=False, num_classes=10, elementwise_best=False, num_rand_init=1):
        self.eps = eps
        self.nb_iter = nb_iter
        self.eps_iter = eps_iter
        self.rand_init = rand_init
        self.targeted = targeted
        self.elementwise_best = elementwise_best
        self.model = model
        self.num_classes = num_classes
        self.num_rand_init = num_rand_init
        # NOTE: unused by get_loss (the margin loss is computed inline);
        # kept to mirror the other attack classes in this module.
        self.loss_func = nn.CrossEntropyLoss(reduction='none')
        self.clip_min = clip_min
        self.clip_max = clip_max
    def get_loss(self, x, y, targeted=False, y_target=None):
        """Per-example confidence-margin loss; larger = less confidently correct."""
        logits = self.model(x)
        if targeted:
            u = torch.arange(logits.shape[0])
            # Drive the target-class logit above the true-class logit.
            loss = -(logits[u, y] - logits[u, y_target])
        else:
            logits_sorted, ind_sorted = logits.sort(dim=1)
            # ind == 1 where the model currently predicts the true class.
            ind = (ind_sorted[:, -1] == y).float()
            u = torch.arange(logits.shape[0])
            # Margin against the runner-up logit (-2) when the prediction is
            # correct, against the top logit (-1) when already misclassified.
            loss = -(logits[u, y] - logits_sorted[:, -2] * ind - logits_sorted[:, -1] * (
                1. - ind))
        return loss
    def perturb_once(self, x, y, targeted=False, y_target=None):
        """Run one PGD trajectory and return the resulting perturbation."""
        delta = torch.zeros_like(x)
        delta = nn.Parameter(delta)
        delta.requires_grad_()
        if self.elementwise_best:
            # Baseline loss on the clean input. (Unlike the GMSA classes below,
            # this is not wrapped in no_grad — it builds an unused graph.)
            loss = self.get_loss(x, y, targeted, y_target)
            worst_loss = loss.data.clone()
            worst_perb = delta.data.clone()
        if self.rand_init:
            delta.data.uniform_(-self.eps, self.eps)
            # Project the random start so x + delta stays in the valid range.
            delta.data = (torch.clamp(x.data + delta.data, min=self.clip_min, max=self.clip_max) - x.data)
        for ii in range(self.nb_iter):
            adv_x = x + delta
            loss = self.get_loss(adv_x, y, targeted, y_target)
            if self.elementwise_best:
                # Track the per-example best iterate seen so far.
                cond = loss.data > worst_loss
                worst_loss[cond] = loss.data[cond]
                worst_perb[cond] = delta.data[cond]
            loss.mean().backward()
            # Linf ascent step, then project into the eps-ball and back into
            # the valid input range; gradients are cleared manually each step.
            grad_sign = delta.grad.data.sign()
            delta.data = delta.data + grad_sign * self.eps_iter
            delta.data = torch.clamp(delta.data, min=-self.eps, max=self.eps)
            delta.data = torch.clamp(x.data + delta.data, min=self.clip_min, max=self.clip_max) - x.data
            delta.grad.data.zero_()
        if self.elementwise_best:
            # Give the final iterate a chance to win as well.
            adv_x = x + delta
            loss = self.get_loss(adv_x, y, targeted, y_target)
            cond = loss.data > worst_loss
            worst_loss[cond] = loss.data[cond]
            worst_perb[cond] = delta.data[cond]
        else:
            worst_perb = delta.data
        return worst_perb
    def get_error(self, x, y):
        """Error score via utils.torch.f7p_loss (project-defined; presumably a
        confidence-style loss — confirm in utils/torch)."""
        with torch.no_grad():
            logits = self.model(x)
            loss = utils.torch.f7p_loss(logits, y, reduction='none')
        return loss
    def perturb(self, x, y):
        """
        Given examples (x, y), returns their adversarial counterparts with
        an attack length of eps.
        :param x: input tensor.
        :param y: label tensor.
        :return: tensor containing perturbed inputs.
        """
        self.model.eval()
        # NOTE(review): assumes a CUDA device is available.
        x = x.detach().clone().cuda()
        y = y.detach().clone().cuda()
        worst_error = self.get_error(x, y)
        worst_perb = torch.zeros_like(x)
        # Untargeted restarts: keep, per example, the perturbation with the
        # largest error score.
        for i in range(self.num_rand_init):
            curr_worst_perb = self.perturb_once(x, y, targeted=False)
            curr_error = self.get_error(x+curr_worst_perb, y)
            cond = curr_error.data > worst_error.data
            worst_error[cond] = curr_error[cond]
            worst_perb[cond] = curr_worst_perb[cond]
        # One targeted run per alternative class.
        for k in range(1, self.num_classes):
            y_target = (y + k) % self.num_classes
            curr_worst_perb = self.perturb_once(x, y, targeted=True, y_target=y_target)
            curr_error = self.get_error(x+curr_worst_perb, y)
            cond = curr_error.data > worst_error.data
            worst_error[cond] = curr_error[cond]
            worst_perb[cond] = curr_worst_perb[cond]
        return x + worst_perb
class ConfGMSAMINAttack:
    """
    GMSA-MIN attack with order=Linf.

    Ensemble variant of the confident PGD attack: the objective at each step
    is the element-wise MINIMUM of the per-model confidence-margin losses, so
    a successful perturbation must fool every model in ``self.models``.

    :param models: list of callables returning logits for a batch of inputs.
    :param eps: maximum Linf distortion.
    :param nb_iter: number of PGD iterations (scaled by the ensemble size).
    :param eps_iter: attack step size.
    :param rand_init: (optional bool) start from a random point in the eps-ball.
    :param clip_min: minimum value per input dimension.
    :param clip_max: maximum value per input dimension.
    :param targeted: if the attack is targeted.
    :param num_classes: number of classes (used to enumerate targeted runs).
    :param elementwise_best: keep the per-example best perturbation seen
        across iterations instead of the final iterate.
    :param num_rand_init: number of independent random restarts.
    :param batch_size: examples processed per batch in perturb().
    """
    def __init__(
            self, models, eps=0.1, nb_iter=100,
            eps_iter=0.01, rand_init=True, clip_min=0., clip_max=1.,
            targeted=False, num_classes=10, elementwise_best=False,
            num_rand_init=1, batch_size=64):
        self.eps = eps
        self.nb_iter = nb_iter
        self.eps_iter = eps_iter
        self.rand_init = rand_init
        self.targeted = targeted
        self.elementwise_best = elementwise_best
        self.models = models
        self.num_classes = num_classes
        self.num_rand_init = num_rand_init
        # NOTE: unused by get_loss (the margin loss is computed inline);
        # kept to mirror the other attack classes in this module.
        self.loss_func = nn.CrossEntropyLoss(reduction='none')
        self.batch_size = batch_size
        self.clip_min = clip_min
        self.clip_max = clip_max
    def get_loss(self, model, x, y, targeted=False, y_target=None):
        """Per-example confidence-margin loss of a single ensemble member."""
        logits = model(x)
        if targeted:
            u = torch.arange(logits.shape[0])
            # Drive the target-class logit above the true-class logit.
            loss = -(logits[u, y] - logits[u, y_target])
        else:
            logits_sorted, ind_sorted = logits.sort(dim=1)
            # ind == 1 where the model currently predicts the true class.
            ind = (ind_sorted[:, -1] == y).float()
            u = torch.arange(logits.shape[0])
            # Margin against the runner-up logit (-2) when the prediction is
            # correct, against the top logit (-1) when already misclassified.
            loss = -(logits[u, y] - logits_sorted[:, -2] * ind - logits_sorted[:, -1] * (
                1. - ind))
        return loss
    def get_ensemble_loss(self, x, y, targeted=False, y_target=None):
        """Element-wise minimum of the per-model losses (GMSA-MIN objective)."""
        min_loss = None
        for model in self.models:
            curr_loss = self.get_loss(model, x, y, targeted, y_target)
            if min_loss is None:
                min_loss = curr_loss
            else:
                cond = curr_loss.data < min_loss.data
                min_loss[cond] = curr_loss[cond]
        return min_loss
    def restore_batchnorm(self, model):
        """Restore the full BatchNorm affine parameters saved by save_batchnorm."""
        for m in model.modules():
            if isinstance(m, nn.BatchNorm2d):
                m.weight = nn.Parameter(m.ckpt_weight_bak, requires_grad=False)
                m.bias = nn.Parameter(m.ckpt_bias_bak, requires_grad=False)
                m.requires_grad_(False)
    def configure_batchnorm(self, model, start_idx, end_idx):
        """Configure model."""
        # Slices the saved BatchNorm affine parameters down to the current
        # batch's index range. This presumes the checkpointed weight/bias hold
        # one entry per test example (a transductive/DENT-style setup) —
        # project-specific; confirm against how save_batchnorm is populated.
        for m in model.modules():
            if isinstance(m, nn.BatchNorm2d):
                m.weight = nn.Parameter(m.ckpt_weight_bak[start_idx:end_idx], requires_grad=False)
                m.bias = nn.Parameter(m.ckpt_bias_bak[start_idx:end_idx], requires_grad=False)
                m.requires_grad_(False)
    def add_model(self, model):
        """Append a model to the ensemble and snapshot its BatchNorm parameters."""
        self.models.append(model)
        self.save_batchnorm(model)
    def save_batchnorm(self, model):
        """Snapshot BatchNorm affine parameters as buffers for slicing/restore."""
        for m in model.modules():
            if isinstance(m, nn.BatchNorm2d):
                m.register_buffer("ckpt_weight_bak", m.weight)
                m.register_buffer("ckpt_bias_bak", m.bias)
    def perturb_once(self, x, y, targeted=False, y_target=None):
        """Run one PGD trajectory against the ensemble-min objective."""
        delta = torch.zeros_like(x)
        delta = nn.Parameter(delta)
        delta.requires_grad_()
        if self.elementwise_best:
            # Baseline loss on the clean input (no graph needed).
            with torch.no_grad():
                loss = self.get_ensemble_loss(x, y, targeted, y_target)
            worst_loss = loss.data.clone()
            worst_perb = delta.data.clone()
        if self.rand_init:
            delta.data.uniform_(-self.eps, self.eps)
            # Project the random start so x + delta stays in the valid range.
            delta.data = (torch.clamp(x.data + delta.data, min=self.clip_min, max=self.clip_max) - x.data)
        # Iteration budget is scaled by the ensemble size.
        for ii in range(self.nb_iter*len(self.models)):
            adv_x = x + delta
            loss = self.get_ensemble_loss(adv_x, y, targeted, y_target)
            if self.elementwise_best:
                cond = loss.data > worst_loss
                worst_loss[cond] = loss.data[cond]
                worst_perb[cond] = delta.data[cond]
            loss.mean().backward()
            # Linf ascent step, then project into the eps-ball and back into
            # the valid input range; gradients are cleared manually each step.
            grad_sign = delta.grad.data.sign()
            delta.data = delta.data + grad_sign * self.eps_iter
            delta.data = torch.clamp(delta.data, min=-self.eps, max=self.eps)
            delta.data = torch.clamp(x.data + delta.data, min=self.clip_min, max=self.clip_max) - x.data
            delta.grad.data.zero_()
        if self.elementwise_best:
            # Give the final iterate a chance to win as well.
            adv_x = x + delta
            with torch.no_grad():
                loss = self.get_ensemble_loss(adv_x, y, targeted, y_target)
            cond = loss.data > worst_loss
            worst_loss[cond] = loss.data[cond]
            worst_perb[cond] = delta.data[cond]
        else:
            worst_perb = delta.data
        return worst_perb
    def get_error(self, x, y):
        """Minimum over models of utils.torch.f7p_loss (project-defined;
        presumably a confidence-style error score — confirm in utils/torch)."""
        min_loss = None
        with torch.no_grad():
            for model in self.models:
                logits = model(x)
                curr_loss = utils.torch.f7p_loss(logits, y, reduction='none')
                if min_loss is None:
                    min_loss = curr_loss
                else:
                    cond = curr_loss.data < min_loss.data
                    min_loss[cond] = curr_loss[cond]
        return min_loss
    def perturb(self, x_test, y_test):
        """
        Given examples (x_test, y_test), returns their adversarial
        counterparts with an attack length of eps, processed in batches.
        :param x_test: input tensor.
        :param y_test: label tensor.
        :return: tensor containing perturbed inputs.
        """
        for model in self.models:
            model.eval()
        # NOTE(review): assumes a CUDA device is available.
        x_test = x_test.detach().clone().cuda()
        y_test = y_test.detach().clone().cuda()
        adv_x_test = []
        n_batches = int(np.ceil(x_test.shape[0] / self.batch_size))
        for batch_idx in range(n_batches):
            start_idx = batch_idx * self.batch_size
            end_idx = min((batch_idx + 1) * self.batch_size, x_test.shape[0])
            # Point each model's sliced BatchNorm at this batch's index range;
            # model 0 is skipped — presumably it uses standard BatchNorm.
            for k, model in enumerate(self.models):
                if k>0:
                    self.configure_batchnorm(model, start_idx, end_idx)
            x = x_test[start_idx:end_idx, :]
            y = y_test[start_idx:end_idx]
            worst_error = self.get_error(x, y)
            worst_perb = torch.zeros_like(x)
            # Untargeted restarts.
            for i in range(self.num_rand_init):
                curr_worst_perb = self.perturb_once(x, y, targeted=False)
                curr_error = self.get_error(x+curr_worst_perb, y)
                cond = curr_error.data > worst_error.data
                worst_error[cond] = curr_error[cond]
                worst_perb[cond] = curr_worst_perb[cond]
            # One targeted run per alternative class. (NOTE(review): this `k`
            # reuses the name of the model-loop index above; both loops have
            # finished before the next use, so it is harmless but misleading.)
            for k in range(1, self.num_classes):
                y_target = (y + k) % self.num_classes
                curr_worst_perb = self.perturb_once(x, y, targeted=True, y_target=y_target)
                curr_error = self.get_error(x+curr_worst_perb, y)
                cond = curr_error.data > worst_error.data
                worst_error[cond] = curr_error[cond]
                worst_perb[cond] = curr_worst_perb[cond]
            adv_x_test.append(x + worst_perb)
        adv_x_test = torch.cat(adv_x_test, dim=0)
        # Undo the per-batch BatchNorm slicing.
        for k, model in enumerate(self.models):
            if k>0:
                self.restore_batchnorm(model)
        return adv_x_test
class ConfGMSAAVGAttack:
    """
    GMSA-AVG attack with order=Linf.

    Ensemble variant of the confident PGD attack: the objective is the SUM of
    the per-model confidence-margin losses (equivalent to the average up to a
    constant factor), backpropagated one model at a time.

    :param models: list of callables returning logits for a batch of inputs.
    :param eps: maximum Linf distortion.
    :param nb_iter: number of PGD iterations.
    :param eps_iter: attack step size.
    :param rand_init: (optional bool) start from a random point in the eps-ball.
    :param clip_min: minimum value per input dimension.
    :param clip_max: maximum value per input dimension.
    :param targeted: if the attack is targeted.
    :param num_classes: number of classes (used to enumerate targeted runs).
    :param elementwise_best: keep the per-example best perturbation seen
        across iterations instead of the final iterate.
    :param num_rand_init: number of independent random restarts.
    :param batch_size: examples processed per batch in perturb().
    """
    def __init__(
            self, models, eps=0.1, nb_iter=100,
            eps_iter=0.01, rand_init=True, clip_min=0., clip_max=1.,
            targeted=False, num_classes=10, elementwise_best=False,
            num_rand_init=1, batch_size=64):
        self.eps = eps
        self.nb_iter = nb_iter
        self.eps_iter = eps_iter
        self.rand_init = rand_init
        self.targeted = targeted
        self.elementwise_best = elementwise_best
        self.models = models
        self.num_classes = num_classes
        self.num_rand_init = num_rand_init
        # NOTE: unused by get_loss (the margin loss is computed inline);
        # kept to mirror the other attack classes in this module.
        self.loss_func = nn.CrossEntropyLoss(reduction='none')
        self.batch_size = batch_size
        self.clip_min = clip_min
        self.clip_max = clip_max
    def get_loss(self, model, x, y, targeted=False, y_target=None):
        """Per-example confidence-margin loss of a single ensemble member."""
        logits = model(x)
        if targeted:
            u = torch.arange(logits.shape[0])
            # Drive the target-class logit above the true-class logit.
            loss = -(logits[u, y] - logits[u, y_target])
        else:
            logits_sorted, ind_sorted = logits.sort(dim=1)
            # ind == 1 where the model currently predicts the true class.
            ind = (ind_sorted[:, -1] == y).float()
            u = torch.arange(logits.shape[0])
            # Margin against the runner-up logit (-2) when the prediction is
            # correct, against the top logit (-1) when already misclassified.
            loss = -(logits[u, y] - logits_sorted[:, -2] * ind - logits_sorted[:, -1] * (
                1. - ind))
        return loss
    def get_ensemble_loss(self, x, y, targeted=False, y_target=None, update=False):
        """Sum of per-model losses. With update=True, each model's mean loss is
        backpropagated immediately (gradients accumulate into delta.grad), so
        only one model's graph is alive at a time; the returned tensor is
        detached (.data) in either case."""
        loss = 0.0
        for model in self.models:
            curr_loss = self.get_loss(model, x, y, targeted, y_target)
            if update:
                curr_loss.mean().backward()
            loss += curr_loss.data
        return loss
    def restore_batchnorm(self, model):
        """Restore the full BatchNorm affine parameters saved by save_batchnorm."""
        for m in model.modules():
            if isinstance(m, nn.BatchNorm2d):
                m.weight = nn.Parameter(m.ckpt_weight_bak, requires_grad=False)
                m.bias = nn.Parameter(m.ckpt_bias_bak, requires_grad=False)
                m.requires_grad_(False)
    def configure_batchnorm(self, model, start_idx, end_idx):
        """Configure model."""
        # Slices the saved BatchNorm affine parameters down to the current
        # batch's index range (transductive/DENT-style per-sample parameters);
        # project-specific; confirm against how save_batchnorm is populated.
        for m in model.modules():
            if isinstance(m, nn.BatchNorm2d):
                m.weight = nn.Parameter(m.ckpt_weight_bak[start_idx:end_idx], requires_grad=False)
                m.bias = nn.Parameter(m.ckpt_bias_bak[start_idx:end_idx], requires_grad=False)
                m.requires_grad_(False)
    def add_model(self, model):
        """Append a model to the ensemble and snapshot its BatchNorm parameters."""
        self.models.append(model)
        self.save_batchnorm(model)
    def save_batchnorm(self, model):
        """Snapshot BatchNorm affine parameters as buffers for slicing/restore."""
        for m in model.modules():
            if isinstance(m, nn.BatchNorm2d):
                m.register_buffer("ckpt_weight_bak", m.weight)
                m.register_buffer("ckpt_bias_bak", m.bias)
    def perturb_once(self, x, y, targeted=False, y_target=None):
        """Run one PGD trajectory against the ensemble-sum objective."""
        delta = torch.zeros_like(x)
        delta = nn.Parameter(delta)
        delta.requires_grad_()
        if self.elementwise_best:
            # Baseline loss on the clean input (no graph needed).
            with torch.no_grad():
                loss = self.get_ensemble_loss(x, y, targeted, y_target, update=False)
            worst_loss = loss.data.clone()
            worst_perb = delta.data.clone()
        if self.rand_init:
            delta.data.uniform_(-self.eps, self.eps)
            # Project the random start so x + delta stays in the valid range.
            delta.data = (torch.clamp(x.data + delta.data, min=self.clip_min, max=self.clip_max) - x.data)
        for ii in range(self.nb_iter):
            adv_x = x + delta
            # update=True performs backward() inside the ensemble loop — no
            # separate loss.backward() call is needed (or possible: the
            # returned sum is detached).
            loss = self.get_ensemble_loss(adv_x, y, targeted, y_target, update=True)
            if self.elementwise_best:
                cond = loss.data > worst_loss
                worst_loss[cond] = loss.data[cond]
                worst_perb[cond] = delta.data[cond]
            # Linf ascent step, then project into the eps-ball and back into
            # the valid input range; gradients are cleared manually each step.
            grad_sign = delta.grad.data.sign()
            delta.data = delta.data + grad_sign * self.eps_iter
            delta.data = torch.clamp(delta.data, min=-self.eps, max=self.eps)
            delta.data = torch.clamp(x.data + delta.data, min=self.clip_min, max=self.clip_max) - x.data
            delta.grad.data.zero_()
        if self.elementwise_best:
            # Give the final iterate a chance to win as well.
            adv_x = x + delta
            with torch.no_grad():
                loss = self.get_ensemble_loss(adv_x, y, targeted, y_target, update=False)
            cond = loss.data > worst_loss
            worst_loss[cond] = loss.data[cond]
            worst_perb[cond] = delta.data[cond]
        else:
            worst_perb = delta.data
        return worst_perb
    def get_error(self, x, y):
        """Sum over models of utils.torch.f7p_loss (project-defined; presumably
        a confidence-style error score — confirm in utils/torch)."""
        loss = 0.0
        with torch.no_grad():
            for model in self.models:
                logits = model(x)
                curr_loss = utils.torch.f7p_loss(logits, y, reduction='none')
                loss += curr_loss.data
        return loss
    def perturb(self, x_test, y_test):
        """
        Given examples (x_test, y_test), returns their adversarial
        counterparts with an attack length of eps, processed in batches.
        :param x_test: input tensor.
        :param y_test: label tensor.
        :return: tensor containing perturbed inputs.
        """
        for model in self.models:
            model.eval()
        # NOTE(review): assumes a CUDA device is available.
        x_test = x_test.detach().clone().cuda()
        y_test = y_test.detach().clone().cuda()
        adv_x_test = []
        n_batches = int(np.ceil(x_test.shape[0] / self.batch_size))
        for batch_idx in range(n_batches):
            start_idx = batch_idx * self.batch_size
            end_idx = min((batch_idx + 1) * self.batch_size, x_test.shape[0])
            # Point each model's sliced BatchNorm at this batch's index range;
            # model 0 is skipped — presumably it uses standard BatchNorm.
            for k, model in enumerate(self.models):
                if k>0:
                    self.configure_batchnorm(model, start_idx, end_idx)
            x = x_test[start_idx:end_idx, :]
            y = y_test[start_idx:end_idx]
            worst_error = self.get_error(x, y)
            worst_perb = torch.zeros_like(x)
            # Untargeted restarts.
            for i in range(self.num_rand_init):
                curr_worst_perb = self.perturb_once(x, y, targeted=False)
                curr_error = self.get_error(x+curr_worst_perb, y)
                cond = curr_error.data > worst_error.data
                worst_error[cond] = curr_error[cond]
                worst_perb[cond] = curr_worst_perb[cond]
            # One targeted run per alternative class. (NOTE(review): this `k`
            # reuses the name of the model-loop index above; both loops have
            # finished before the next use, so it is harmless but misleading.)
            for k in range(1, self.num_classes):
                y_target = (y + k) % self.num_classes
                curr_worst_perb = self.perturb_once(x, y, targeted=True, y_target=y_target)
                curr_error = self.get_error(x+curr_worst_perb, y)
                cond = curr_error.data > worst_error.data
                worst_error[cond] = curr_error[cond]
                worst_perb[cond] = curr_worst_perb[cond]
            adv_x_test.append(x + worst_perb)
        adv_x_test = torch.cat(adv_x_test, dim=0)
        # Undo the per-batch BatchNorm slicing.
        for k, model in enumerate(self.models):
            if k>0:
                self.restore_batchnorm(model)
        return adv_x_test
class LinfPGDAttack:
    """
    Projected gradient descent attack under an Linf constraint.

    Uses per-example cross-entropy as the objective (negated when the attack
    is targeted) and keeps, per example, the perturbation with the highest
    loss across random restarts.

    :param model: callable returning logits for a batch of inputs.
    :param eps: maximum Linf distortion.
    :param nb_iter: number of PGD iterations.
    :param eps_iter: attack step size.
    :param rand_init: (optional bool) start from a random point in the eps-ball.
    :param clip_min: minimum value per input dimension.
    :param clip_max: maximum value per input dimension.
    :param targeted: if the attack is targeted.
    :param num_classes: number of classes (unused here; interface parity).
    :param elementwise_best: keep the per-example best iterate instead of the
        final one.
    :param num_rand_init: number of independent random restarts.
    """
    def __init__(
            self, model, eps=0.1, nb_iter=100,
            eps_iter=0.01, rand_init=True, clip_min=0., clip_max=1.,
            targeted=False, num_classes=10, elementwise_best=False, num_rand_init=1):
        self.model = model
        self.num_classes = num_classes
        self.eps = eps
        self.eps_iter = eps_iter
        self.nb_iter = nb_iter
        self.clip_min = clip_min
        self.clip_max = clip_max
        self.rand_init = rand_init
        self.num_rand_init = num_rand_init
        self.targeted = targeted
        self.elementwise_best = elementwise_best
        self.loss_func = nn.CrossEntropyLoss(reduction='none')
    def get_loss(self, x, y):
        """Per-example cross-entropy of the model on x; negated if targeted."""
        per_example = self.loss_func(self.model(x), y)
        return -per_example if self.targeted else per_example
    def perturb_once(self, x, y):
        """Run a single PGD trajectory and return the resulting perturbation."""
        noise = nn.Parameter(torch.zeros_like(x))
        noise.requires_grad_()
        if self.elementwise_best:
            # Baseline: loss of the unperturbed input.
            baseline = self.get_loss(x, y)
            best_loss = baseline.data.clone()
            best_perb = noise.data.clone()
        if self.rand_init:
            # Random start inside the eps-ball, projected into the valid range.
            noise.data.uniform_(-self.eps, self.eps)
            noise.data = torch.clamp(x.data + noise.data, min=self.clip_min, max=self.clip_max) - x.data
        for _ in range(self.nb_iter):
            loss = self.get_loss(x + noise, y)
            if self.elementwise_best:
                improved = loss.data > best_loss
                best_loss[improved] = loss.data[improved]
                best_perb[improved] = noise.data[improved]
            loss.mean().backward()
            # Signed ascent step, then project into the eps-ball and the
            # valid input range; clear the gradient for the next step.
            step = noise.grad.data.sign() * self.eps_iter
            noise.data = torch.clamp(noise.data + step, min=-self.eps, max=self.eps)
            noise.data = torch.clamp(x.data + noise.data, min=self.clip_min, max=self.clip_max) - x.data
            noise.grad.data.zero_()
        if self.elementwise_best:
            # Let the final iterate compete as well.
            loss = self.get_loss(x + noise, y)
            improved = loss.data > best_loss
            best_loss[improved] = loss.data[improved]
            best_perb[improved] = noise.data[improved]
            return best_perb
        return noise.data
    def perturb(self, x, y):
        """
        Given examples (x, y), returns their adversarial counterparts with
        an attack length of eps.
        :param x: input tensor.
        :param y: label tensor.
        :return: tensor containing perturbed inputs.
        """
        self.model.eval()
        x = x.detach().clone().cuda()
        y = y.detach().clone().cuda()
        worst_error = None
        worst_perb = None
        for _ in range(self.num_rand_init):
            candidate = self.perturb_once(x, y)
            with torch.no_grad():
                err = self.get_loss(x + candidate, y)
            if worst_error is None:
                worst_error = err.data
                worst_perb = candidate
            else:
                better = err.data > worst_error
                worst_error[better] = err.data[better]
                worst_perb[better] = candidate[better]
        return x + worst_perb
| 35.603687
| 106
| 0.581845
| 3,087
| 23,178
| 4.147392
| 0.054098
| 0.040772
| 0.019527
| 0.019917
| 0.964071
| 0.96079
| 0.955557
| 0.950871
| 0.942982
| 0.937905
| 0
| 0.006615
| 0.315126
| 23,178
| 650
| 107
| 35.658462
| 0.799924
| 0.098412
| 0
| 0.917226
| 0
| 0
| 0.004102
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.064877
| false
| 0
| 0.011186
| 0
| 0.123043
| 0.002237
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2695d201a9f50a6d413abe2ea4c4aa9a45f0f8b8
| 12,157
|
py
|
Python
|
python/test_surfacecorrections.py
|
GlenRice-NOAA/BAG
|
19a79a14fdb1f38a51169ec009a72f68dfd6c054
|
[
"BSD-3-Clause"
] | null | null | null |
python/test_surfacecorrections.py
|
GlenRice-NOAA/BAG
|
19a79a14fdb1f38a51169ec009a72f68dfd6c054
|
[
"BSD-3-Clause"
] | null | null | null |
python/test_surfacecorrections.py
|
GlenRice-NOAA/BAG
|
19a79a14fdb1f38a51169ec009a72f68dfd6c054
|
[
"BSD-3-Clause"
] | null | null | null |
from bagPy import *
from math import isclose
import shutil, pathlib
import bagMetadataSamples, testUtils
import sys
# Shared constants used by every test below.
chunkSize = 100
compressionLevel = 6
# Sample data lives next to the repository's examples directory.
datapath = f"{pathlib.Path(__file__).parent.absolute()}/../examples/sample-data"
print("Testing SurfaceCorrections")
def testReadIrregular():
    # Open the shipped sample BAG read-only and inspect its corrections layer.
    dataset = Dataset.openDataset(datapath + "/sample.bag", BAG_OPEN_READONLY)
    assert dataset
    corrections = dataset.getSurfaceCorrections()
    assert corrections
    desc = corrections.getDescriptor()
    # The sample data carries an irregularly spaced set of two correctors.
    assert desc.getSurfaceType() == BAG_SURFACE_IRREGULARLY_SPACED
    assert desc.getNumCorrectors() == 2
def testCreateIrregular():
    # Create an irregular corrections layer in a throwaway dataset and verify
    # the descriptor it reports.
    tmpFile = testUtils.RandomFileGuard("name")
    dataset = Dataset.create(tmpFile.getName(), Metadata(), chunkSize, compressionLevel)
    assert dataset
    surfaceType = BAG_SURFACE_IRREGULARLY_SPACED
    numCorrectors = 4
    corrections = dataset.createSurfaceCorrections(
        surfaceType, numCorrectors, chunkSize, compressionLevel)
    assert corrections
    desc = corrections.getDescriptor()
    # A freshly created layer reports the requested shape and empty defaults.
    assert desc.getNumCorrectors() == numCorrectors
    assert desc.getSurfaceType() == surfaceType
    assert desc.getVerticalDatums() == ""
    assert desc.getOrigin() == (0.0, 0.0)
    assert desc.getSpacing() == (0.0, 0.0)
    del dataset  # release the dataset handle before tmpFile cleans up
def testCreateGridded():
    # Same as testCreateIrregular, but for a gridded (grid-extents) layer.
    tmpFile = testUtils.RandomFileGuard("name")
    dataset = Dataset.create(tmpFile.getName(), Metadata(), chunkSize, compressionLevel)
    assert dataset
    surfaceType = BAG_SURFACE_GRID_EXTENTS
    numCorrectors = 2
    corrections = dataset.createSurfaceCorrections(
        surfaceType, numCorrectors, chunkSize, compressionLevel)
    assert corrections
    desc = corrections.getDescriptor()
    # A freshly created layer reports the requested shape and empty defaults.
    assert desc.getNumCorrectors() == numCorrectors
    assert desc.getSurfaceType() == surfaceType
    assert desc.getVerticalDatums() == ""
    assert desc.getOrigin() == (0.0, 0.0)
    assert desc.getSpacing() == (0.0, 0.0)
    del dataset  # release the dataset handle before tmpFile cleans up
def testCreateWriteIrregular():
    # Round-trip a single vertical-datum correction record through an
    # irregular corrections layer.
    tmpFile = testUtils.RandomFileGuard("name")
    dataset = Dataset.create(tmpFile.getName(), Metadata(), chunkSize, compressionLevel)
    assert dataset
    corrections = dataset.createSurfaceCorrections(BAG_SURFACE_IRREGULARLY_SPACED,
        2, chunkSize, compressionLevel)
    assert corrections
    assert corrections.getDescriptor()
    record = BagVerticalDatumCorrections(1.2, 2.1, (3.4, 4.5))
    # Write the record at row/column (0, 0).
    corrections.write(0, 0, 0, 0, SurfaceCorrectionsLayerItems((record,)))
    # Read it back and compare field by field.
    result = corrections.read(0, 0, 0, 0)
    assert result
    readBack = result.asVerticalDatumCorrections()
    assert len(readBack) == 1
    assert isclose(readBack[0].x, record.x, abs_tol=1e-5)
    assert isclose(readBack[0].y, record.y, abs_tol=1e-5)
    assert all(isclose(actual, expected, abs_tol=1e-5)
               for actual, expected in zip(readBack[0].zValues(), record.zValues()))
    del dataset  # release the dataset handle before tmpFile cleans up
def testCreateWriteGridded():
    # Round-trip a single gridded correction record.
    tmpFile = testUtils.RandomFileGuard("name")
    dataset = Dataset.create(tmpFile.getName(), Metadata(), chunkSize, compressionLevel)
    assert dataset
    corrections = dataset.createSurfaceCorrections(BAG_SURFACE_GRID_EXTENTS,
        3, chunkSize, compressionLevel)
    assert corrections
    assert corrections.getDescriptor()
    record = BagVerticalDatumCorrectionsGridded((9.87, 6.543, 2.109876))
    # Write the record at row/column (0, 0).
    corrections.write(0, 0, 0, 0, SurfaceCorrectionsGriddedLayerItems((record,)))
    # Read it back and compare the z-values element-wise.
    result = corrections.read(0, 0, 0, 0)
    assert result
    readBack = result.asVerticalDatumCorrectionsGridded()
    assert len(readBack) == 1
    assert all(isclose(actual, expected, abs_tol=1e-5)
               for actual, expected in zip(readBack[0].zValues(), record.zValues()))
    del dataset  # release the dataset handle before tmpFile cleans up
def testCreateWriteTwoGridded():
    # Write two gridded correction records, reopen the file read-only, and
    # verify both records survive the round trip.
    tmpFile = testUtils.RandomFileGuard("name")
    expectedItems = (BagVerticalDatumCorrectionsGridded((1.23, 4.56, 7.89)),
                     BagVerticalDatumCorrectionsGridded((9.87, 6.54, 3.21)))
    # Create the file with real metadata and write one row spanning two columns.
    metadata = Metadata()
    metadata.loadFromBuffer(bagMetadataSamples.kMetadataXML)
    dataset = Dataset.create(tmpFile.getName(), metadata, chunkSize, compressionLevel)
    assert dataset
    corrections = dataset.createSurfaceCorrections(BAG_SURFACE_GRID_EXTENTS,
        3, chunkSize, compressionLevel)
    assert corrections
    descriptor = corrections.getDescriptor()
    assert descriptor
    assert descriptor.getNumCorrectors() == 3
    assert descriptor.getSurfaceType() == BAG_SURFACE_GRID_EXTENTS
    corrections.write(0, 0, 0, 1, SurfaceCorrectionsGriddedLayerItems(expectedItems))
    # Reopen the same file read-only and read the corrections back.
    dataset = Dataset.openDataset(tmpFile.getName(), BAG_OPEN_READONLY)
    assert dataset
    corrections = dataset.getSurfaceCorrections()
    assert corrections
    result = corrections.read(0, 0, 0, 1)
    assert result
    readBack = result.asVerticalDatumCorrectionsGridded()
    assert len(readBack) == 2
    for got, expected in zip(readBack, expectedItems):
        assert all(isclose(a, e, abs_tol=1e-5)
                   for a, e in zip(got.zValues(), expected.zValues()))
    del dataset  # release the dataset handle before tmpFile cleans up
# Assembled from pieces of the tests above purely to verify that
# readCorrectedRow is callable from Python. The simple layer and the
# corrections layer written here do not correspond to one another, so the
# returned values are meaningless — only the result type/length is checked.
def testReadCorrectedRow():
    tmpFile = testUtils.RandomFileGuard("name")
    metadata = Metadata()
    metadata.loadFromBuffer(bagMetadataSamples.kMetadataXML)
    dataset = Dataset.create(tmpFile.getName(), metadata, chunkSize, compressionLevel)
    assert dataset
    # Populate a simple elevation layer with a constant value.
    simpleLayer = dataset.createSimpleLayer(Average_Elevation, chunkSize, compressionLevel)
    assert simpleLayer
    simpleLayer.write(1, 2, 3, 5, FloatLayerItems((123.456,) * 12))
    # Write a single gridded correction record at (0, 0).
    corrections = dataset.createSurfaceCorrections(BAG_SURFACE_GRID_EXTENTS,
        3, chunkSize, compressionLevel)
    assert corrections
    record = BagVerticalDatumCorrectionsGridded((9.87, 6.543, 2.109876))
    corrections.write(0, 0, 0, 0, SurfaceCorrectionsGriddedLayerItems((record,)))
    # Fetch both layers back and run a corrected-row read through them
    # (row 0, columns 0..0, corrector 1).
    correctionsB = dataset.getSurfaceCorrections()
    assert correctionsB
    simpleLayerB = dataset.getSimpleLayer(Average_Elevation)
    assert simpleLayerB
    correctedData = correctionsB.readCorrectedRow(0, 0, 0, 1, simpleLayerB)
    # Mismatched layers make the values garbage, but the result must still be
    # a one-element float collection.
    correctedFloats = correctedData.asFloatItems()
    assert len(correctedFloats) == 1
    del dataset  # release the dataset handle before tmpFile cleans up
# Assembled from pieces of the tests above purely to verify that
# readCorrected is callable from Python. The simple layer and the
# corrections layer written here do not correspond to one another, so the
# returned values are meaningless — only the result type/length is checked.
def testReadCorrected():
    tmpFile = testUtils.RandomFileGuard("name")
    metadata = Metadata()
    metadata.loadFromBuffer(bagMetadataSamples.kMetadataXML)
    dataset = Dataset.create(tmpFile.getName(), metadata, chunkSize, compressionLevel)
    assert dataset
    # Populate a simple elevation layer with a constant value.
    simpleLayer = dataset.createSimpleLayer(Average_Elevation, chunkSize, compressionLevel)
    assert simpleLayer
    simpleLayer.write(1, 2, 3, 5, FloatLayerItems((123.456,) * 12))
    # Write a single gridded correction record at (0, 0).
    corrections = dataset.createSurfaceCorrections(BAG_SURFACE_GRID_EXTENTS,
        3, chunkSize, compressionLevel)
    assert corrections
    record = BagVerticalDatumCorrectionsGridded((9.87, 6.543, 2.109876))
    corrections.write(0, 0, 0, 0, SurfaceCorrectionsGriddedLayerItems((record,)))
    # Fetch both layers back and run a corrected region read through them
    # (rows 0..0, columns 0..0, corrector 1).
    correctionsB = dataset.getSurfaceCorrections()
    assert correctionsB
    simpleLayerB = dataset.getSimpleLayer(Average_Elevation)
    assert simpleLayerB
    correctedData = correctionsB.readCorrected(0, 0, 0, 0, 1, simpleLayerB)
    # Mismatched layers make the values garbage, but the result must still be
    # a one-element float collection.
    correctedFloats = correctedData.asFloatItems()
    assert len(correctedFloats) == 1
    del dataset  # release the dataset handle before tmpFile cleans up
# Execute the whole suite in definition order.
for unit_test in (testReadIrregular,
                  testCreateIrregular,
                  testCreateGridded,
                  testCreateWriteIrregular,
                  testCreateWriteGridded,
                  testCreateWriteTwoGridded,
                  # TODO: revisit the two readCorrected* tests once data whose
                  # simple layer matches its corrections layer exists; today
                  # they only prove the bindings are callable and return the
                  # correct types, with meaningless values.
                  testReadCorrectedRow,
                  testReadCorrected):
    unit_test()
| 33.769444
| 91
| 0.746484
| 1,138
| 12,157
| 7.93761
| 0.182777
| 0.030112
| 0.05491
| 0.033654
| 0.801506
| 0.801506
| 0.79597
| 0.77394
| 0.762427
| 0.749917
| 0
| 0.018638
| 0.170272
| 12,157
| 359
| 92
| 33.86351
| 0.876871
| 0.152669
| 0
| 0.783186
| 0
| 0
| 0.008676
| 0.00234
| 0
| 0
| 0
| 0.002786
| 0.243363
| 1
| 0.035398
| false
| 0
| 0.022124
| 0
| 0.057522
| 0.004425
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
26eb4031acf2f4af0ba5c4bc641125e3fbf60085
| 7,949
|
py
|
Python
|
tests/routes/test_products.py
|
EZhivaikin/TrialPython
|
2424b85faabc8eecea108cc5b16a4ec6b7a12cf1
|
[
"BSD-2-Clause"
] | null | null | null |
tests/routes/test_products.py
|
EZhivaikin/TrialPython
|
2424b85faabc8eecea108cc5b16a4ec6b7a12cf1
|
[
"BSD-2-Clause"
] | null | null | null |
tests/routes/test_products.py
|
EZhivaikin/TrialPython
|
2424b85faabc8eecea108cc5b16a4ec6b7a12cf1
|
[
"BSD-2-Clause"
] | null | null | null |
from flask import url_for, json
from app.models.products import Product
from app.settings import MAX_CATEGORIES_COUNT, MIN_CATEGORIES_COUNT
from tests.factories import ProductFactory, BrandFactory, CategoryFactory
class TestProducts:
    """Route-level tests for the products blueprint: list, create, update,
    delete and get, covering both happy paths and validation/404 errors.

    Error payload convention (as exercised below): 404/parse errors carry
    ``{'error': ..., 'field': <name>}``; schema failures carry
    ``{'validation_error': ...}`` with status 400.
    """

    def test_get_products(self, client):
        """Listing responds 200 with a paginated 'results' payload."""
        response = client.get(url_for("products.get_products"))
        products_dict = json.loads(response.data)
        assert response.status_code == 200
        assert 'results' in products_dict

    def test_create_product_with_not_existing_brand_should_raise_404(self, product_request, client):
        """Creating with an unknown brand_id yields 404 pointing at 'brand_id'."""
        response = client.post(url_for("products.create_product"), json=product_request)
        response_dict = json.loads(response.data)
        assert response.status_code == 404
        assert 'error' in response_dict
        assert response_dict['field'] == 'brand_id'

    def test_create_product_not_enough_categories_should_raise_400(self, product_request, client):
        """Fewer categories than the configured minimum is a 400."""
        if MIN_CATEGORIES_COUNT == 0:
            # No minimum configured -> nothing to violate; trivially pass.
            assert True
            return
        product_request['categories'] = []
        response = client.post(url_for("products.create_product"), json=product_request)
        response_dict = json.loads(response.data)
        assert response.status_code == 400
        assert 'validation_error' in response_dict

    def test_create_product_too_many_categories_should_raise_400(self, product_request, client):
        """One category over the configured maximum is a 400."""
        product_request['categories'] = list(range(MAX_CATEGORIES_COUNT + 1))
        response = client.post(url_for("products.create_product"), json=product_request)
        response_dict = json.loads(response.data)
        assert response.status_code == 400
        assert 'validation_error' in response_dict

    def test_create_product_incorrect_expiration_date_should_raise_400(self, product_request, client):
        """A malformed expiration_date is rejected with 400 and the field name."""
        product_request['expiration_date'] = '2020-04-23T18:25:43Z'
        response = client.post(url_for("products.create_product"), json=product_request)
        response_dict = json.loads(response.data)
        assert response.status_code == 400
        assert 'error' in response_dict
        assert response_dict['field'] == 'expiration_date'

    def test_update_product_with_not_existing_product_should_raise_404(self, product_request, client):
        """Updating a missing product id yields 404 pointing at 'id'."""
        response = client.put(url_for("products.update_product", id=1), json=product_request)
        response_dict = json.loads(response.data)
        assert response.status_code == 404
        assert 'error' in response_dict
        assert response_dict['field'] == 'id'

    def test_update_product_incorrect_expiration_date_should_raise_400(self, product_request, client):
        """A malformed expiration_date on update is rejected with 400.

        NOTE(review): this hits the *create* endpoint — it looks like a
        copy/paste of the create-variant above. If ``update_product``
        validates the payload before the id lookup (as the other update
        tests using ``id=1`` suggest), this should instead do
        ``client.put(url_for("products.update_product", id=1), ...)`` —
        confirm against the route's validation order before changing.
        """
        product_request['expiration_date'] = '2020-04-23T18:25:43Z'
        response = client.post(url_for("products.create_product"), json=product_request)
        response_dict = json.loads(response.data)
        assert response.status_code == 400
        assert 'error' in response_dict
        assert response_dict['field'] == 'expiration_date'

    def test_update_product_not_enough_categories_should_raise_400(self, product_request, client):
        """Fewer categories than the minimum on update is a 400."""
        if MIN_CATEGORIES_COUNT == 0:
            # No minimum configured -> nothing to violate; trivially pass.
            assert True
            return
        product_request['categories'] = []
        response = client.put(url_for("products.update_product", id=1), json=product_request)
        response_dict = json.loads(response.data)
        assert response.status_code == 400
        assert 'validation_error' in response_dict

    def test_update_product_too_many_categories_should_raise_400(self, product_request, client):
        """One category over the maximum on update is a 400."""
        product_request['categories'] = list(range(MAX_CATEGORIES_COUNT + 1))
        response = client.put(url_for("products.update_product", id=1), json=product_request)
        response_dict = json.loads(response.data)
        assert response.status_code == 400
        assert 'validation_error' in response_dict

    def test_delete_product_that_not_exists_should_raise_404(self, client):
        """Deleting a missing product id yields 404 pointing at 'id'."""
        response = client.delete(url_for("products.delete_product", id=1))
        response_dict = json.loads(response.data)
        assert response.status_code == 404
        assert 'error' in response_dict
        assert response_dict['field'] == 'id'

    def test_get_product_that_not_exists_should_raise_404(self, client):
        """Fetching a missing product id yields 404 pointing at 'id'."""
        response = client.get(url_for("products.get_product", id=1))
        response_dict = json.loads(response.data)
        assert response.status_code == 404
        assert 'error' in response_dict
        assert response_dict['field'] == 'id'

    def test_create_product_just_do_it(self, db, product_request, client):
        """Happy-path create: echoes id/name and the linked brand/category."""
        brand = BrandFactory()
        category = CategoryFactory()
        db.session.commit()
        db.session.refresh(category)
        db.session.refresh(brand)
        product_request['brand_id'] = brand.id
        product_request['categories'] = [category.id]
        response = client.post(url_for("products.create_product"), json=product_request)
        response_dict = json.loads(response.data)
        assert response.status_code == 200
        assert 'validation_error' not in response_dict
        assert 'error' not in response_dict
        assert response_dict['id'] is not None
        assert response_dict['name'] == product_request['name']
        assert response_dict['brand']['id'] == brand.id
        assert response_dict['categories'][0]['id'] == category.id

    def test_create_product_should_became_featured_if_rating_more_than_8(self, db, product_request, client):
        """A rating above 8 marks the created product as featured."""
        brand = BrandFactory()
        category = CategoryFactory()
        db.session.commit()
        db.session.refresh(category)
        db.session.refresh(brand)
        product_request['brand_id'] = brand.id
        product_request['categories'] = [category.id]
        product_request['rating'] = 8.5
        response = client.post(url_for("products.create_product"), json=product_request)
        response_dict = json.loads(response.data)
        assert response.status_code == 200
        assert 'validation_error' not in response_dict
        assert 'error' not in response_dict
        assert response_dict['featured'] is True

    def test_update_product_just_do_it(self, db, product_request, client):
        """Happy-path update: the response reflects the new field values."""
        product, brand, category = self.create_product(db)
        product_request['brand_id'] = brand.id
        product_request['categories'] = [category.id]
        response = client.put(url_for("products.update_product", id=product.id), json=product_request)
        response_dict = json.loads(response.data)
        assert response.status_code == 200
        assert 'validation_error' not in response_dict
        assert 'error' not in response_dict
        assert response_dict['id']
        assert response_dict['name'] == product_request['name']
        assert response_dict['items_in_stock'] == product_request['items_in_stock']
        assert response_dict['rating'] == product_request['rating']

    def test_update_product_should_became_featured_if_rating_more_than_8(self, db, product_request, client):
        """A rating above 8 marks the updated product as featured."""
        product, brand, category = self.create_product(db)
        product_request['brand_id'] = brand.id
        product_request['categories'] = [category.id]
        product_request['rating'] = 8.5
        response = client.put(url_for("products.update_product", id=product.id), json=product_request)
        response_dict = json.loads(response.data)
        assert response.status_code == 200
        assert 'validation_error' not in response_dict
        assert 'error' not in response_dict
        assert response_dict['featured'] is True

    def test_get_product_just_do_it(self, db, client):
        """Happy-path get: the stored product is returned by id."""
        product, brand, category = self.create_product(db)
        response = client.get(url_for("products.get_product", id=product.id))
        response_dict = json.loads(response.data)
        needed_product = Product.query.get(product.id)
        assert response.status_code == 200
        assert response_dict['id'] == product.id
        assert needed_product.id == product.id

    def test_delete_product_just_do_it(self, db, client):
        """Happy-path delete: the product is returned once, then gone."""
        product, brand, category = self.create_product(db)
        # Fixed copy/paste: target the delete endpoint, not get_product,
        # consistent with test_delete_product_that_not_exists_should_raise_404.
        response = client.delete(url_for("products.delete_product", id=product.id))
        response_dict = json.loads(response.data)
        removed_product = Product.query.get(product.id)
        assert response.status_code == 200
        assert response_dict['id'] == product.id
        assert removed_product is None

    def create_product(self, db):
        """Persist and return a (product, brand, category) triple for tests."""
        brand = BrandFactory()
        category = CategoryFactory()
        db.session.commit()
        db.session.refresh(category)
        db.session.refresh(brand)
        product = ProductFactory(brand=brand, categories=[category])
        db.session.commit()
        db.session.refresh(product)
        return product, brand, category
| 40.556122
| 105
| 0.777582
| 1,090
| 7,949
| 5.380734
| 0.089908
| 0.106394
| 0.042967
| 0.06087
| 0.88815
| 0.87792
| 0.868201
| 0.868201
| 0.868201
| 0.838704
| 0
| 0.017753
| 0.114228
| 7,949
| 195
| 106
| 40.764103
| 0.815225
| 0
| 0
| 0.725
| 0
| 0
| 0.117247
| 0.040257
| 0
| 0
| 0
| 0
| 0.3625
| 1
| 0.1125
| false
| 0
| 0.025
| 0
| 0.1625
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f8206bfc3dabc104e0f673f323914d0a2238e071
| 4,560
|
py
|
Python
|
cities_light/south_migrations/0001_initial.py
|
suquant/django-cities-light
|
786852c8372f24e6f05c9c9b2e03e12873a222b8
|
[
"MIT"
] | 4
|
2015-10-05T09:20:35.000Z
|
2019-07-30T17:47:03.000Z
|
cities_light/south_migrations/0001_initial.py
|
suquant/django-cities-light
|
786852c8372f24e6f05c9c9b2e03e12873a222b8
|
[
"MIT"
] | 1
|
2020-01-25T13:02:17.000Z
|
2020-01-25T13:02:17.000Z
|
cities_light/south_migrations/0001_initial.py
|
suquant/django-cities-light
|
786852c8372f24e6f05c9c9b2e03e12873a222b8
|
[
"MIT"
] | 1
|
2018-10-12T16:16:01.000Z
|
2018-10-12T16:16:01.000Z
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """Initial South schema migration for the ``cities_light`` app.

    Creates the ``Country`` and ``City`` tables plus the
    (``country``, ``name``) unique constraint on ``City``.

    NOTE: this file is South-generated; the ``models`` dict below is the
    frozen ORM snapshot taken at generation time — do not edit by hand.
    """

    def forwards(self, orm):
        """Apply the migration: create both tables and the unique constraint."""
        # Adding model 'Country'
        db.create_table('cities_light_country', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=200)),
            ('name_ascii', self.gf('django.db.models.fields.CharField')(max_length=200, db_index=True)),
            ('slug', self.gf('autoslug.fields.AutoSlugField')(unique_with=(), max_length=50, populate_from=None)),
            ('code2', self.gf('django.db.models.fields.CharField')(max_length=2, unique=True, null=True, blank=True)),
            ('code3', self.gf('django.db.models.fields.CharField')(max_length=3, unique=True, null=True, blank=True)),
            ('continent', self.gf('django.db.models.fields.CharField')(max_length=2, db_index=True)),
            ('tld', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=5, blank=True)),
            ('geoname_id', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
        ))
        db.send_create_signal('cities_light', ['Country'])
        # Adding model 'City'
        db.create_table('cities_light_city', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(max_length=200, db_index=True)),
            ('name_ascii', self.gf('django.db.models.fields.CharField')(max_length=200, db_index=True)),
            ('slug', self.gf('autoslug.fields.AutoSlugField')(unique_with=(), max_length=50, populate_from=None)),
            ('geoname_id', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
            ('country', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['cities_light.Country'])),
        ))
        db.send_create_signal('cities_light', ['City'])
        # Adding unique constraint on 'City', fields ['country', 'name']
        db.create_unique('cities_light_city', ['country_id', 'name'])

    def backwards(self, orm):
        """Revert the migration, mirroring forwards() in reverse order."""
        # Removing unique constraint on 'City', fields ['country', 'name']
        db.delete_unique('cities_light_city', ['country_id', 'name'])
        # Deleting model 'Country'
        db.delete_table('cities_light_country')
        # Deleting model 'City'
        db.delete_table('cities_light_city')

    # Frozen ORM state (South-generated) — the model definitions as they
    # existed when this migration was created.
    models = {
        'cities_light.city': {
            'Meta': {'ordering': "['name']", 'unique_together': "(('country', 'name'),)", 'object_name': 'City'},
            'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cities_light.Country']"}),
            'geoname_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'db_index': 'True'}),
            'name_ascii': ('django.db.models.fields.CharField', [], {'max_length': '200', 'db_index': 'True'}),
            'slug': ('autoslug.fields.AutoSlugField', [], {'unique_with': '()', 'max_length': '50', 'populate_from': 'None'})
        },
        'cities_light.country': {
            'Meta': {'ordering': "['name']", 'object_name': 'Country'},
            'code2': ('django.db.models.fields.CharField', [], {'max_length': '2', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
            'code3': ('django.db.models.fields.CharField', [], {'max_length': '3', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
            'continent': ('django.db.models.fields.CharField', [], {'max_length': '2', 'db_index': 'True'}),
            'geoname_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}),
            'name_ascii': ('django.db.models.fields.CharField', [], {'max_length': '200', 'db_index': 'True'}),
            'slug': ('autoslug.fields.AutoSlugField', [], {'unique_with': '()', 'max_length': '50', 'populate_from': 'None'}),
            'tld': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '5', 'blank': 'True'})
        }
    }

    complete_apps = ['cities_light']
| 60.8
| 135
| 0.599342
| 534
| 4,560
| 4.958802
| 0.142322
| 0.081571
| 0.137462
| 0.196375
| 0.820997
| 0.784743
| 0.760574
| 0.732628
| 0.701662
| 0.699396
| 0
| 0.012372
| 0.184649
| 4,560
| 75
| 136
| 60.8
| 0.699839
| 0.052412
| 0
| 0.285714
| 0
| 0
| 0.449235
| 0.238989
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035714
| false
| 0
| 0.071429
| 0
| 0.160714
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6ef7529ed5d24c43053fdfa2c6d5bdc00ceb0c63
| 4,977
|
py
|
Python
|
tests/expectations/core/test_expect_table_row_count_to_be_between.py
|
OmriBromberg/great_expectations
|
60eb81ebfb08fef5d37d55c316dc962928beb165
|
[
"Apache-2.0"
] | 6,451
|
2017-09-11T16:32:53.000Z
|
2022-03-31T23:27:49.000Z
|
tests/expectations/core/test_expect_table_row_count_to_be_between.py
|
OmriBromberg/great_expectations
|
60eb81ebfb08fef5d37d55c316dc962928beb165
|
[
"Apache-2.0"
] | 3,892
|
2017-09-08T18:57:50.000Z
|
2022-03-31T23:15:20.000Z
|
tests/expectations/core/test_expect_table_row_count_to_be_between.py
|
OmriBromberg/great_expectations
|
60eb81ebfb08fef5d37d55c316dc962928beb165
|
[
"Apache-2.0"
] | 1,023
|
2017-09-08T15:22:05.000Z
|
2022-03-31T21:17:08.000Z
|
from great_expectations.core.batch import BatchRequest, RuntimeBatchRequest
from great_expectations.core.expectation_validation_result import (
ExpectationValidationResult,
)
from great_expectations.data_context import DataContext
def test_expect_table_row_count_to_be_between_runtime_custom_query_no_temp_table_sa(
    titanic_v013_multi_datasource_multi_execution_engine_data_context_with_checkpoints_v1_with_empty_store_stats_enabled,
):
    """Row-count expectation over a runtime custom query with temp tables disabled."""
    data_context: DataContext = (
        titanic_v013_multi_datasource_multi_execution_engine_data_context_with_checkpoints_v1_with_empty_store_stats_enabled
    )
    runtime_request = RuntimeBatchRequest(
        datasource_name="my_sqlite_db_datasource",
        data_connector_name="default_runtime_data_connector_name",
        data_asset_name="titanic",
        runtime_parameters={"query": "select * from titanic"},
        batch_identifiers={"default_identifier_name": "test_identifier"},
        batch_spec_passthrough={"create_temp_table": False},
    )
    validator = data_context.get_validator(
        batch_request=runtime_request,
        create_expectation_suite_with_name="test",
    )
    observed = validator.expect_table_row_count_to_be_between(
        min_value=100, max_value=2000
    )
    # Full Titanic table: 1313 rows.
    expected = ExpectationValidationResult(
        success=True,
        result={"observed_value": 1313},
        meta={},
        expectation_config={
            "kwargs": {
                "min_value": 100,
                "max_value": 2000,
                "batch_id": "a47a711a9984cb2a482157adf54c3cb6",
            },
            "ge_cloud_id": None,
            "meta": {},
            "expectation_type": "expect_table_row_count_to_be_between",
        },
        exception_info={
            "raised_exception": False,
            "exception_traceback": None,
            "exception_message": None,
        },
    )
    assert observed == expected
def test_expect_table_row_count_to_be_between_runtime_custom_query_with_where_no_temp_table_sa(
    titanic_v013_multi_datasource_multi_execution_engine_data_context_with_checkpoints_v1_with_empty_store_stats_enabled,
):
    """Row-count expectation over a filtered runtime query with temp tables disabled."""
    data_context: DataContext = (
        titanic_v013_multi_datasource_multi_execution_engine_data_context_with_checkpoints_v1_with_empty_store_stats_enabled
    )
    runtime_request = RuntimeBatchRequest(
        datasource_name="my_sqlite_db_datasource",
        data_connector_name="default_runtime_data_connector_name",
        data_asset_name="titanic",
        runtime_parameters={"query": "select * from titanic where sexcode = 1"},
        batch_identifiers={"default_identifier_name": "test_identifier"},
        batch_spec_passthrough={"create_temp_table": False},
    )
    validator = data_context.get_validator(
        batch_request=runtime_request,
        create_expectation_suite_with_name="test",
    )
    observed = validator.expect_table_row_count_to_be_between(
        min_value=100, max_value=2000
    )
    # Filtered subset (sexcode = 1): 462 rows.
    expected = ExpectationValidationResult(
        success=True,
        result={"observed_value": 462},
        meta={},
        expectation_config={
            "kwargs": {
                "min_value": 100,
                "max_value": 2000,
                "batch_id": "a47a711a9984cb2a482157adf54c3cb6",
            },
            "ge_cloud_id": None,
            "meta": {},
            "expectation_type": "expect_table_row_count_to_be_between",
        },
        exception_info={
            "raised_exception": False,
            "exception_traceback": None,
            "exception_message": None,
        },
    )
    assert observed == expected
def test_expect_table_row_count_to_be_between_no_temp_table_sa(
    titanic_v013_multi_datasource_multi_execution_engine_data_context_with_checkpoints_v1_with_empty_store_stats_enabled,
):
    """Row-count expectation via an inferred (non-runtime) batch request, no temp table."""
    data_context: DataContext = (
        titanic_v013_multi_datasource_multi_execution_engine_data_context_with_checkpoints_v1_with_empty_store_stats_enabled
    )
    inferred_request = BatchRequest(
        datasource_name="my_sqlite_db_datasource",
        data_connector_name="default_inferred_data_connector_name",
        data_asset_name="titanic",
        batch_spec_passthrough={"create_temp_table": False},
    )
    validator = data_context.get_validator(
        batch_request=inferred_request,
        create_expectation_suite_with_name="test",
    )
    observed = validator.expect_table_row_count_to_be_between(
        min_value=100, max_value=2000
    )
    # Full Titanic table: 1313 rows.
    expected = ExpectationValidationResult(
        success=True,
        result={"observed_value": 1313},
        meta={},
        expectation_config={
            "kwargs": {
                "min_value": 100,
                "max_value": 2000,
                "batch_id": "a47a711a9984cb2a482157adf54c3cb6",
            },
            "ge_cloud_id": None,
            "meta": {},
            "expectation_type": "expect_table_row_count_to_be_between",
        },
        exception_info={
            "raised_exception": False,
            "exception_traceback": None,
            "exception_message": None,
        },
    )
    assert observed == expected
| 39.188976
| 143
| 0.687764
| 520
| 4,977
| 6
| 0.176923
| 0.031731
| 0.040385
| 0.054808
| 0.921154
| 0.921154
| 0.921154
| 0.909295
| 0.909295
| 0.909295
| 0
| 0.036078
| 0.231465
| 4,977
| 126
| 144
| 39.5
| 0.779608
| 0
| 0
| 0.716667
| 0
| 0
| 0.200121
| 0.085393
| 0
| 0
| 0
| 0
| 0.025
| 1
| 0.025
| false
| 0.025
| 0.025
| 0
| 0.05
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3e12eb01e6fe956e7218020b30076064427c9af1
| 71
|
py
|
Python
|
topic2_Python_DataStructure_And_Funnction/from_import.py
|
AbnerCui/Python_Lectures
|
2004ed7901de9731c0715e25d4b63064369ea6ad
|
[
"MIT"
] | 2
|
2020-09-04T03:45:44.000Z
|
2020-09-16T05:49:58.000Z
|
topic2_Python_DataStructure_And_Funnction/from_import.py
|
AbnerCui/Python_Lectures
|
2004ed7901de9731c0715e25d4b63064369ea6ad
|
[
"MIT"
] | null | null | null |
topic2_Python_DataStructure_And_Funnction/from_import.py
|
AbnerCui/Python_Lectures
|
2004ed7901de9731c0715e25d4b63064369ea6ad
|
[
"MIT"
] | 7
|
2020-07-19T02:00:05.000Z
|
2021-05-29T07:41:48.000Z
|
# Lecture demo: `from module import name` vs plain `import module`.
from support import print_func
# Importing `using_name` runs its top-level code as a side effect
# (presumably it demonstrates the __name__ check — confirm in using_name.py).
import using_name

print_func("Jack")
| 10.142857
| 30
| 0.802817
| 11
| 71
| 4.909091
| 0.727273
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.140845
| 71
| 6
| 31
| 11.833333
| 0.885246
| 0
| 0
| 0
| 0
| 0
| 0.057971
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0.666667
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 7
|
3e6b33c13e3b9f8c5144ed8326480f01d1499ac2
| 93
|
py
|
Python
|
react_native.py
|
fellipecaetano/run-ios
|
cf38a36b22cbdfad680822c1d0eb188b3e2a5064
|
[
"MIT"
] | null | null | null |
react_native.py
|
fellipecaetano/run-ios
|
cf38a36b22cbdfad680822c1d0eb188b3e2a5064
|
[
"MIT"
] | null | null | null |
react_native.py
|
fellipecaetano/run-ios
|
cf38a36b22cbdfad680822c1d0eb188b3e2a5064
|
[
"MIT"
] | null | null | null |
import subprocess
def run_ios():
    """Launch the React Native iOS build via the CLI.

    Returns the exit status of the `react-native run-ios` process.
    """
    command = 'react-native run-ios'.split()
    return subprocess.call(command)
| 18.6
| 58
| 0.731183
| 13
| 93
| 5.153846
| 0.769231
| 0.179104
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129032
| 93
| 4
| 59
| 23.25
| 0.82716
| 0
| 0
| 0
| 0
| 0
| 0.215054
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
e4a3788fa08e508712317b93061949d07a96d89d
| 8,607
|
py
|
Python
|
tests/utils/format_tests.py
|
mehrdad-shokri/retdec-regression-tests-framework
|
9c3edcd0a7bc292a0d5b5cbfb4315010c78d3bc3
|
[
"MIT"
] | 21
|
2017-12-12T20:38:43.000Z
|
2019-04-14T12:46:10.000Z
|
tests/utils/format_tests.py
|
mehrdad-shokri/retdec-regression-tests-framework
|
9c3edcd0a7bc292a0d5b5cbfb4315010c78d3bc3
|
[
"MIT"
] | 6
|
2018-01-06T13:32:23.000Z
|
2018-09-14T15:09:11.000Z
|
tests/utils/format_tests.py
|
mehrdad-shokri/retdec-regression-tests-framework
|
9c3edcd0a7bc292a0d5b5cbfb4315010c78d3bc3
|
[
"MIT"
] | 11
|
2017-12-12T20:38:46.000Z
|
2018-07-19T03:12:03.000Z
|
"""
Tests for the :mod:`regression_tests.utils.format` module.
"""
# The tests of format_age() and format_date() are taken from
# https://github.com/s3rvac/git-branch-viewer/blob/master/tests/format_tests.py
import datetime
import unittest
from regression_tests.utils.format import format_age
from regression_tests.utils.format import format_date
from regression_tests.utils.format import format_id
from regression_tests.utils.format import format_runtime
class FormatAgeWithNonNegativeAgesTests(unittest.TestCase):
    """Tests for `format_age()` with non-negative values."""

    def _assert_age(self, expected, **delta_kwargs):
        """Build a timedelta from *delta_kwargs* and check its formatting."""
        self.assertEqual(format_age(datetime.timedelta(**delta_kwargs)), expected)

    def test_returns_correctly_formatted_age_zero_seconds(self):
        self._assert_age('0 seconds', seconds=0)

    def test_returns_correctly_formatted_age_one_second(self):
        self._assert_age('1 second', seconds=1)

    def test_returns_correctly_formatted_age_two_seconds(self):
        self._assert_age('2 seconds', seconds=2)

    def test_returns_correctly_formatted_age_max_seconds(self):
        self._assert_age('59 seconds', seconds=59)

    def test_returns_correctly_formatted_age_one_minute(self):
        self._assert_age('1 minute', minutes=1)

    def test_returns_correctly_formatted_age_one_minute_and_some_seconds(self):
        self._assert_age('1 minute', minutes=1, seconds=2)

    def test_returns_correctly_formatted_age_two_minutes(self):
        self._assert_age('2 minutes', minutes=2)

    def test_returns_correctly_formatted_age_two_minutes_and_some_seconds(self):
        self._assert_age('2 minutes', minutes=2, seconds=5)

    def test_returns_correctly_formatted_age_max_minutes(self):
        self._assert_age('59 minutes', minutes=59, seconds=59)

    def test_returns_correctly_formatted_age_one_hour(self):
        self._assert_age('1 hour', hours=1)

    def test_returns_correctly_formatted_age_one_hour_and_some_minutes(self):
        self._assert_age('1 hour', hours=1, minutes=5)

    def test_returns_correctly_formatted_age_two_hours(self):
        self._assert_age('2 hours', hours=2)

    def test_returns_correctly_formatted_age_two_hours_and_some_minutes(self):
        self._assert_age('2 hours', hours=2, minutes=5)

    def test_returns_correctly_formatted_age_max_hours(self):
        self._assert_age('23 hours', hours=23, minutes=59, seconds=59)

    def test_returns_correctly_formatted_age_one_day(self):
        self._assert_age('1 day', days=1)

    def test_returns_correctly_formatted_age_one_day_and_some_hours(self):
        self._assert_age('1 day', days=1, hours=5)

    def test_returns_correctly_formatted_age_two_days(self):
        self._assert_age('2 days', days=2)

    def test_returns_correctly_formatted_age_two_days_and_some_hours(self):
        self._assert_age('2 days', days=2, hours=5)
class FormatAgeWithNegativeAgesTests(unittest.TestCase):
    """Tests for `format_age()` with negative values.

    Each test mirrors a case from the non-negative suite with the sign
    flipped; names therefore all carry a ``minus_`` prefix.
    """

    def test_returns_correctly_formatted_age_minus_one_second(self):
        age = datetime.timedelta(seconds=-1)
        self.assertEqual(format_age(age), '-1 second')

    def test_returns_correctly_formatted_age_minus_two_seconds(self):
        age = datetime.timedelta(seconds=-2)
        self.assertEqual(format_age(age), '-2 seconds')

    def test_returns_correctly_formatted_age_minus_max_seconds(self):
        age = datetime.timedelta(seconds=-59)
        self.assertEqual(format_age(age), '-59 seconds')

    def test_returns_correctly_formatted_age_minus_one_minute(self):
        age = datetime.timedelta(minutes=-1)
        self.assertEqual(format_age(age), '-1 minute')

    def test_returns_correctly_formatted_age_minus_one_minute_and_some_seconds(self):
        age = datetime.timedelta(minutes=-1, seconds=-2)
        self.assertEqual(format_age(age), '-1 minute')

    def test_returns_correctly_formatted_age_minus_two_minutes(self):
        age = datetime.timedelta(minutes=-2)
        self.assertEqual(format_age(age), '-2 minutes')

    def test_returns_correctly_formatted_age_minus_two_minutes_and_some_seconds(self):
        age = datetime.timedelta(minutes=-2, seconds=-5)
        self.assertEqual(format_age(age), '-2 minutes')

    def test_returns_correctly_formatted_age_minus_max_minutes(self):
        age = datetime.timedelta(minutes=-59, seconds=-59)
        self.assertEqual(format_age(age), '-59 minutes')

    def test_returns_correctly_formatted_age_minus_one_hour(self):
        age = datetime.timedelta(hours=-1)
        self.assertEqual(format_age(age), '-1 hour')

    def test_returns_correctly_formatted_age_minus_one_hour_and_some_minutes(self):
        age = datetime.timedelta(hours=-1, minutes=-5)
        self.assertEqual(format_age(age), '-1 hour')

    def test_returns_correctly_formatted_age_minus_two_hours(self):
        age = datetime.timedelta(hours=-2)
        self.assertEqual(format_age(age), '-2 hours')

    def test_returns_correctly_formatted_age_minus_two_hours_and_some_minutes(self):
        age = datetime.timedelta(hours=-2, minutes=-5)
        self.assertEqual(format_age(age), '-2 hours')

    def test_returns_correctly_formatted_age_minus_max_hours(self):
        age = datetime.timedelta(hours=-23, minutes=-59, seconds=-59)
        self.assertEqual(format_age(age), '-23 hours')

    def test_returns_correctly_formatted_age_minus_one_day(self):
        age = datetime.timedelta(days=-1)
        self.assertEqual(format_age(age), '-1 day')

    def test_returns_correctly_formatted_age_minus_one_day_and_some_hours(self):
        age = datetime.timedelta(days=-1, hours=-5)
        self.assertEqual(format_age(age), '-1 day')

    def test_returns_correctly_formatted_age_minus_two_days(self):
        age = datetime.timedelta(days=-2)
        self.assertEqual(format_age(age), '-2 days')

    def test_returns_correctly_formatted_age_minus_two_days_and_some_hours(self):
        # Renamed from test_returns_correctly_formatted_age_two_days_and_some_hours:
        # the delta here is negative, so the name now carries the same
        # ``minus_`` prefix as every other test in this class.
        age = datetime.timedelta(days=-2, hours=-5)
        self.assertEqual(format_age(age), '-2 days')
class FormatDateTests(unittest.TestCase):
    """Tests for `format_date()`."""

    def test_returns_dash_when_date_is_not_given(self):
        # A missing date is rendered as a single-dash placeholder.
        self.assertEqual(format_date(None), '-')

    def test_returns_correctly_formatted_date_when_date_is_given(self):
        sample = datetime.datetime(2014, 6, 7, 14, 25, 2)
        self.assertEqual(format_date(sample), '2014-06-07 14:25:02')
class FormatIdTests(unittest.TestCase):
    """Tests for `format_id()`."""

    def test_empty_string_is_converted_to_underscore(self):
        self.assertEqual(format_id(''), '_')

    def test_string_that_is_already_id_is_left_unchanged(self):
        # Letters, digits, underscore and dash form the valid id alphabet.
        valid_id_chars = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_-'
        self.assertEqual(format_id(valid_id_chars), valid_id_chars)

    def test_invalid_characters_are_replaced_with_dash(self):
        for invalid_char in (' ', '%', '@'):  # and so on...
            self.assertEqual(format_id(invalid_char), '-')
class FormatRuntimeTests(unittest.TestCase):
    """Tests for `format_runtime()`."""

    def _assert_runtime(self, seconds, expected):
        """Check that *seconds* is formatted as *expected*."""
        self.assertEqual(format_runtime(seconds), expected)

    def test_zero_seconds_is_formatted_correctly(self):
        self._assert_runtime(0, '0.00s')

    def test_very_small_runtime_is_formatted_correctly(self):
        self._assert_runtime(0.001, '0.00s')

    def test_half_second_is_formatted_correctly(self):
        self._assert_runtime(0.5, '0.50s')

    def test_sixty_seconds_is_formatted_correctly(self):
        self._assert_runtime(60, '1m 0s')

    def test_sixty_one_seconds_as_int_is_formatted_correctly(self):
        self._assert_runtime(61, '1m 1s')

    def test_sixty_one_seconds_as_float_is_formatted_correctly(self):
        self._assert_runtime(61.0, '1m 1s')
| 40.408451
| 86
| 0.73022
| 1,141
| 8,607
| 5.15688
| 0.099036
| 0.122366
| 0.171312
| 0.140721
| 0.855201
| 0.824609
| 0.809993
| 0.768865
| 0.730795
| 0.671652
| 0
| 0.023697
| 0.161613
| 8,607
| 212
| 87
| 40.599057
| 0.791713
| 0.045196
| 0
| 0.183099
| 0
| 0
| 0.049028
| 0.007825
| 0
| 0
| 0
| 0
| 0.338028
| 1
| 0.323944
| false
| 0
| 0.042254
| 0
| 0.401408
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e4c62563acf98c116797b9a6f9efb0554908492c
| 2,833
|
py
|
Python
|
integration_tests/test_11_ves.py
|
krasm/python-onapsdk
|
87cd3017fc542a8afd3be51fbd89934ed87ed3a7
|
[
"Apache-2.0"
] | 4
|
2020-06-13T04:51:27.000Z
|
2021-01-06T15:00:51.000Z
|
integration_tests/test_11_ves.py
|
krasm/python-onapsdk
|
87cd3017fc542a8afd3be51fbd89934ed87ed3a7
|
[
"Apache-2.0"
] | 10
|
2021-09-20T15:42:47.000Z
|
2021-09-23T12:49:51.000Z
|
integration_tests/test_11_ves.py
|
krasm/python-onapsdk
|
87cd3017fc542a8afd3be51fbd89934ed87ed3a7
|
[
"Apache-2.0"
] | 8
|
2020-08-28T10:56:02.000Z
|
2022-02-11T17:06:03.000Z
|
# SPDX-License-Identifier: Apache-2.0
# Copyright 2020 Nokia
import pytest
import logging
import os
import requests
from onapsdk.configuration import settings
from onapsdk.utils.jinja import jinja_env
from onapsdk.ves.ves import Ves
from onapsdk.dmaap.dmaap import Dmaap
logging.basicConfig(level=os.environ.get("LOGLEVEL", "DEBUG"))
def reset_dmaap_mock():
    """Ask the DMaaP mock service to discard any previously collected events."""
    requests.get(f"{settings.DMAAP_URL}/reset")
@pytest.mark.integration
def test_should_send_event_to_ves():
    """A rendered standard event is accepted (202) by the VES collector."""
    # given
    requests.post(
        f"{settings.VES_URL}/set_dmaap_address",
        json={"DMAAP_MOCK": settings.DMAAP_URL},
    )
    payload: str = jinja_env().get_template("ves_stnd_event.json.j2").render()

    # when
    response = Ves.send_event(
        basic_auth={'username': 'sample1', 'password': 'sample1'},
        json_event=payload,
        version="v7",
    )

    # then
    assert response.status_code == 202
@pytest.mark.integration
def test_should_send_batch_event_to_ves():
    """A rendered batch event is accepted (202) by the VES collector."""
    # given
    requests.post(
        f"{settings.VES_URL}/set_dmaap_address",
        json={"DMAAP_MOCK": settings.DMAAP_URL},
    )
    payload: str = jinja_env().get_template("ves7_batch_with_stndDefined_valid.json.j2").render()

    # when
    response = Ves.send_batch_event(
        basic_auth={'username': 'sample1', 'password': 'sample1'},
        json_event=payload,
        version="v7",
    )

    # then
    assert response.status_code == 202
@pytest.mark.integration
def test_should_send_event_to_ves_and_dmaap():
    """A single event is accepted and shows up once on the 'fault' topic."""
    # given
    requests.post(
        f"{settings.VES_URL}/set_dmaap_address",
        json={"DMAAP_MOCK": settings.DMAAP_URL},
    )
    payload: str = jinja_env().get_template("ves_stnd_event.json.j2").render()

    # when
    reset_dmaap_mock()
    response = Ves.send_event(
        basic_auth={'username': 'sample1', 'password': 'sample1'},
        json_event=payload,
        version="v7",
    )

    # then
    assert response.status_code == 202
    collected = Dmaap.get_events_for_topic(
        "fault",
        basic_auth={'username': 'dcae@dcae.onap.org', 'password': 'demo123456!'},
    )
    assert len(collected) == 1
@pytest.mark.integration
def test_should_send_batch_event_to_ves_and_dmaap():
    """A batch event is accepted and both contained events reach 'fault'."""
    # given
    requests.post(
        f"{settings.VES_URL}/set_dmaap_address",
        json={"DMAAP_MOCK": settings.DMAAP_URL},
    )
    payload: str = jinja_env().get_template("ves7_batch_with_stndDefined_valid.json.j2").render()

    # when
    reset_dmaap_mock()
    response = Ves.send_batch_event(
        basic_auth={'username': 'sample1', 'password': 'sample1'},
        json_event=payload,
        version="v7",
    )

    # then
    assert response.status_code == 202
    collected = Dmaap.get_events_for_topic(
        "fault",
        basic_auth={'username': 'dcae@dcae.onap.org', 'password': 'demo123456!'},
    )
    assert len(collected) == 2
| 28.616162
| 113
| 0.674903
| 358
| 2,833
| 5.061453
| 0.226257
| 0.034768
| 0.056291
| 0.05298
| 0.817881
| 0.817881
| 0.817881
| 0.817881
| 0.817881
| 0.817881
| 0
| 0.021777
| 0.189552
| 2,833
| 98
| 114
| 28.908163
| 0.767422
| 0.042358
| 0
| 0.644068
| 0
| 0
| 0.183469
| 0.046701
| 0
| 0
| 0
| 0
| 0.101695
| 1
| 0.084746
| false
| 0.101695
| 0.135593
| 0
| 0.220339
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
903a0d672750a9f97405c8c7c3d9f6abdbfe0249
| 149,808
|
py
|
Python
|
teaser/data/bindings/opengis/citygml/raw/base.py
|
Ja98/TEASER
|
1bb782a01ce1b38c4abecb9c6ecc4d59f1ba21a3
|
[
"MIT"
] | 1
|
2018-10-22T07:21:15.000Z
|
2018-10-22T07:21:15.000Z
|
teaser/data/bindings/opengis/citygml/raw/base.py
|
Ja98/TEASER
|
1bb782a01ce1b38c4abecb9c6ecc4d59f1ba21a3
|
[
"MIT"
] | null | null | null |
teaser/data/bindings/opengis/citygml/raw/base.py
|
Ja98/TEASER
|
1bb782a01ce1b38c4abecb9c6ecc4d59f1ba21a3
|
[
"MIT"
] | null | null | null |
# ./pyxb/bundles/opengis/citygml/raw/base.py
# -*- coding: utf-8 -*-
# PyXB bindings for NM:2e547b4e8cb671380833d3ecf742650e1bb66b5d
# Generated 2017-01-09 16:11:32.922479 by PyXB version 1.2.5 using Python 3.5.2.final.0
# Namespace http://www.opengis.net/citygml/2.0
from __future__ import unicode_literals
import pyxb
import pyxb.binding
import pyxb.binding.saxer
import io
import pyxb.utils.utility
import pyxb.utils.domutils
import sys
import pyxb.utils.six as _six
# Unique identifier for bindings created at the same time
_GenerationUID = pyxb.utils.utility.UniqueIdentifier('urn:uuid:e75cce9c-d67d-11e6-8d7b-100ba9a189d0')
# Version of PyXB used to generate the bindings
_PyXBVersion = '1.2.5'
# Generated bindings are not compatible across PyXB versions
if pyxb.__version__ != _PyXBVersion:
    raise pyxb.PyXBVersionError(_PyXBVersion)
# A holder for module-level binding classes so we can access them from
# inside class definitions where property names may conflict.
_module_typeBindings = pyxb.utils.utility.Object()
# Import bindings for namespaces imported into schema.
# NOTE: these mid-file imports are emitted by the PyXB generator; they must run
# before the namespace objects below are referenced.
import pyxb.bundles.common.xlink
import teaser.data.bindings.opengis.raw.gml
import pyxb.binding.datatypes
import teaser.data.bindings.opengis.misc.raw.xAL
# NOTE: All namespace declarations are reserved within the binding
# Namespace for the CityGML 2.0 schema this module binds; created on demand.
Namespace = pyxb.namespace.NamespaceForURI('http://www.opengis.net/citygml/2.0', create_if_missing=True)
Namespace.configureCategories(['typeBinding', 'elementBinding'])
# Aliases for imported namespaces (GML, XLink, xAL); each is configured with
# the same binding categories so lookups by category work uniformly.
_Namespace_gml = teaser.data.bindings.opengis.raw.gml.Namespace
_Namespace_gml.configureCategories(['typeBinding', 'elementBinding'])
_Namespace = pyxb.bundles.common.xlink.Namespace
_Namespace.configureCategories(['typeBinding', 'elementBinding'])
_Namespace_xAL = teaser.data.bindings.opengis.misc.raw.xAL.Namespace
_Namespace_xAL.configureCategories(['typeBinding', 'elementBinding'])
def CreateFromDocument (xml_text, default_namespace=None, location_base=None):
    """Parse XML and return the binding instance for its document element.

    @param xml_text An XML document: data (Python 2 str or Python 3
    bytes), or text (Python 2 unicode or Python 3 str) in the
    L{pyxb._InputEncoding} encoding.
    @keyword default_namespace The L{pyxb.Namespace} instance used as the
    default namespace where the document declares none in scope; when
    unspecified or C{None}, this module's namespace is used.
    @keyword location_base: An object recorded as the base of all
    L{pyxb.utils.utility.Location} instances associated with events and
    objects handled by the parser (e.g. the URI the document came from).
    """
    # When a non-SAX parsing style is configured, delegate to the DOM path.
    if pyxb.XMLStyle_saxer != pyxb._XMLStyle:
        dom = pyxb.utils.domutils.StringToDOM(xml_text)
        return CreateFromDOM(dom.documentElement, default_namespace=default_namespace)
    if default_namespace is None:
        default_namespace = Namespace.fallbackNamespace()
    parser = pyxb.binding.saxer.make_parser(fallback_namespace=default_namespace, location_base=location_base)
    content_handler = parser.getContentHandler()
    raw = xml_text
    if isinstance(raw, _six.text_type):
        # The SAX layer consumes bytes; encode text input first.
        raw = raw.encode(pyxb._InputEncoding)
    parser.parse(io.BytesIO(raw))
    return content_handler.rootObject()
def CreateFromDOM (node, default_namespace=None):
    """Create a Python binding instance from the given DOM node.

    The node tag must correspond to an element declaration in this module.
    @deprecated: Forcing use of DOM interface is unnecessary; use L{CreateFromDocument}."""
    fallback = default_namespace
    if fallback is None:
        fallback = Namespace.fallbackNamespace()
    return pyxb.binding.basis.element.AnyCreateFromDOM(node, fallback)
# Atomic simple type: {http://www.opengis.net/citygml/2.0}RelativeToTerrainType
class RelativeToTerrainType (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):

    """Specifies the spatial relation of a CityObject realativ to terrain in a qualitative way. The values of
    this type are defined in the XML file RelativeToTerrainType.xml, according to the dictionary concept of
    GML3."""

    # Schema-qualified name and XSD source location recorded by the PyXB generator.
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'RelativeToTerrainType')
    _XSDLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 155, 1)
    _Documentation = 'Specifies the spatial relation of a CityObject realativ to terrain in a qualitative way. The values of\n\t\t\t\tthis type are defined in the XML file RelativeToTerrainType.xml, according to the dictionary concept of\n\t\t\t\tGML3.'
# The enumeration facet is attached after the class body so it can name the
# class itself as its value datatype; each addEnumeration call both registers a
# legal value and exposes it as a class attribute.
RelativeToTerrainType._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=RelativeToTerrainType, enum_prefix=None)
RelativeToTerrainType.entirelyAboveTerrain = RelativeToTerrainType._CF_enumeration.addEnumeration(unicode_value='entirelyAboveTerrain', tag='entirelyAboveTerrain')
RelativeToTerrainType.substantiallyAboveTerrain = RelativeToTerrainType._CF_enumeration.addEnumeration(unicode_value='substantiallyAboveTerrain', tag='substantiallyAboveTerrain')
RelativeToTerrainType.substantiallyAboveAndBelowTerrain = RelativeToTerrainType._CF_enumeration.addEnumeration(unicode_value='substantiallyAboveAndBelowTerrain', tag='substantiallyAboveAndBelowTerrain')
RelativeToTerrainType.substantiallyBelowTerrain = RelativeToTerrainType._CF_enumeration.addEnumeration(unicode_value='substantiallyBelowTerrain', tag='substantiallyBelowTerrain')
RelativeToTerrainType.entirelyBelowTerrain = RelativeToTerrainType._CF_enumeration.addEnumeration(unicode_value='entirelyBelowTerrain', tag='entirelyBelowTerrain')
RelativeToTerrainType._InitializeFacetMap(RelativeToTerrainType._CF_enumeration)
# Register the binding in the namespace so lookups by schema name resolve to it.
Namespace.addCategoryObject('typeBinding', 'RelativeToTerrainType', RelativeToTerrainType)
_module_typeBindings.RelativeToTerrainType = RelativeToTerrainType
# Atomic simple type: {http://www.opengis.net/citygml/2.0}RelativeToWaterType
class RelativeToWaterType (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):

    """Specifies the spatial relation of a CityObject realativ to the water surface in a qualitative way. The
    values of this type are defined in the XML file RelativeToTerrainType.xml, according to the dictionary concept of
    GML3."""

    # Schema-qualified name and XSD source location recorded by the PyXB generator.
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'RelativeToWaterType')
    _XSDLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 170, 1)
    _Documentation = 'Specifies the spatial relation of a CityObject realativ to the water surface in a qualitative way. The\n\t\t\t\tvalues of this type are defined in the XML file RelativeToTerrainType.xml, according to the dictionary concept of\n\t\t\t\tGML3.'
# Enumeration facet and members attached after the class body (the facet must
# name the class as its value datatype).
RelativeToWaterType._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=RelativeToWaterType, enum_prefix=None)
RelativeToWaterType.entirelyAboveWaterSurface = RelativeToWaterType._CF_enumeration.addEnumeration(unicode_value='entirelyAboveWaterSurface', tag='entirelyAboveWaterSurface')
RelativeToWaterType.substantiallyAboveWaterSurface = RelativeToWaterType._CF_enumeration.addEnumeration(unicode_value='substantiallyAboveWaterSurface', tag='substantiallyAboveWaterSurface')
RelativeToWaterType.substantiallyAboveAndBelowWaterSurface = RelativeToWaterType._CF_enumeration.addEnumeration(unicode_value='substantiallyAboveAndBelowWaterSurface', tag='substantiallyAboveAndBelowWaterSurface')
RelativeToWaterType.substantiallyBelowWaterSurface = RelativeToWaterType._CF_enumeration.addEnumeration(unicode_value='substantiallyBelowWaterSurface', tag='substantiallyBelowWaterSurface')
RelativeToWaterType.entirelyBelowWaterSurface = RelativeToWaterType._CF_enumeration.addEnumeration(unicode_value='entirelyBelowWaterSurface', tag='entirelyBelowWaterSurface')
RelativeToWaterType.temporarilyAboveAndBelowWaterSurface = RelativeToWaterType._CF_enumeration.addEnumeration(unicode_value='temporarilyAboveAndBelowWaterSurface', tag='temporarilyAboveAndBelowWaterSurface')
RelativeToWaterType._InitializeFacetMap(RelativeToWaterType._CF_enumeration)
# Register the binding in the namespace so lookups by schema name resolve to it.
Namespace.addCategoryObject('typeBinding', 'RelativeToWaterType', RelativeToWaterType)
_module_typeBindings.RelativeToWaterType = RelativeToWaterType
# Atomic simple type: {http://www.opengis.net/citygml/2.0}doubleBetween0and1
class doubleBetween0and1 (pyxb.binding.datatypes.double):

    """Type for values, which are greater or equal than 0 and less or equal than 1. Used for color encoding, for
    example. """

    # Schema-qualified name and XSD source location recorded by the PyXB generator.
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'doubleBetween0and1')
    _XSDLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 276, 1)
    _Documentation = 'Type for values, which are greater or equal than 0 and less or equal than 1. Used for color encoding, for\n\t\t\t\texample. '
# Range facets [0.0, 1.0] attached after the class body (facets must name the
# class as their value datatype).
doubleBetween0and1._CF_maxInclusive = pyxb.binding.facets.CF_maxInclusive(value_datatype=doubleBetween0and1, value=pyxb.binding.datatypes.double(1.0))
doubleBetween0and1._CF_minInclusive = pyxb.binding.facets.CF_minInclusive(value_datatype=doubleBetween0and1, value=pyxb.binding.datatypes.double(0.0))
doubleBetween0and1._InitializeFacetMap(doubleBetween0and1._CF_maxInclusive,
   doubleBetween0and1._CF_minInclusive)
# Register the binding in the namespace so lookups by schema name resolve to it.
Namespace.addCategoryObject('typeBinding', 'doubleBetween0and1', doubleBetween0and1)
_module_typeBindings.doubleBetween0and1 = doubleBetween0and1
# List simple type: {http://www.opengis.net/citygml/2.0}TransformationMatrix4x4Type
# superclasses teaser.data.bindings.opengis.raw.gml.doubleList
class TransformationMatrix4x4Type (pyxb.binding.basis.STD_list):

    """Used for implicit geometries. The Transformation matrix is a 4 by 4 matrix, thus it must be a list with 16
    items. The order the matrix element are represented is row-major, i. e. the first 4 elements represent the first row, the
    fifth to the eight element the second row,... """

    # Schema-qualified name and XSD source location recorded by the PyXB generator.
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'TransformationMatrix4x4Type')
    _XSDLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 295, 1)
    _Documentation = 'Used for implicit geometries. The Transformation matrix is a 4 by 4 matrix, thus it must be a list with 16\n\t\t\t\titems. The order the matrix element are represented is row-major, i. e. the first 4 elements represent the first row, the\n\t\t\t\tfifth to the eight element the second row,... '

    # Each list item is an xs:double.
    _ItemType = pyxb.binding.datatypes.double
# Length facet (exactly 16 items = 4x4, row-major) attached after the class body.
TransformationMatrix4x4Type._CF_length = pyxb.binding.facets.CF_length(value=pyxb.binding.datatypes.nonNegativeInteger(16))
TransformationMatrix4x4Type._InitializeFacetMap(TransformationMatrix4x4Type._CF_length)
# Register the binding in the namespace so lookups by schema name resolve to it.
Namespace.addCategoryObject('typeBinding', 'TransformationMatrix4x4Type', TransformationMatrix4x4Type)
_module_typeBindings.TransformationMatrix4x4Type = TransformationMatrix4x4Type
# List simple type: {http://www.opengis.net/citygml/2.0}TransformationMatrix2x2Type
# superclasses teaser.data.bindings.opengis.raw.gml.doubleList
class TransformationMatrix2x2Type (pyxb.binding.basis.STD_list):

    """Used for georeferencing. The Transformation matrix is a 2 by 2 matrix, thus it must be a list with 4
    items. The order the matrix element are represented is row-major, i. e. the first 2 elements represent the first row, the
    fifth to the eight element the second row,... """

    # Schema-qualified name and XSD source location recorded by the PyXB generator.
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'TransformationMatrix2x2Type')
    _XSDLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 306, 1)
    _Documentation = 'Used for georeferencing. The Transformation matrix is a 2 by 2 matrix, thus it must be a list with 4\n\t\t\t\titems. The order the matrix element are represented is row-major, i. e. the first 2 elements represent the first row, the\n\t\t\t\tfifth to the eight element the second row,... '

    # Each list item is an xs:double.
    _ItemType = pyxb.binding.datatypes.double
# Length facet (exactly 4 items = 2x2, row-major) attached after the class body.
TransformationMatrix2x2Type._CF_length = pyxb.binding.facets.CF_length(value=pyxb.binding.datatypes.nonNegativeInteger(4))
TransformationMatrix2x2Type._InitializeFacetMap(TransformationMatrix2x2Type._CF_length)
# Register the binding in the namespace so lookups by schema name resolve to it.
Namespace.addCategoryObject('typeBinding', 'TransformationMatrix2x2Type', TransformationMatrix2x2Type)
_module_typeBindings.TransformationMatrix2x2Type = TransformationMatrix2x2Type
# List simple type: {http://www.opengis.net/citygml/2.0}TransformationMatrix3x4Type
# superclasses teaser.data.bindings.opengis.raw.gml.doubleList
class TransformationMatrix3x4Type (pyxb.binding.basis.STD_list):

    """Used for texture parameterization. The Transformation matrix is a 3 by 4 matrix, thus it must be a list
    with 12 items. The order the matrix element are represented is row-major, i. e. the first 4 elements represent the first
    row, the fifth to the eight element the second row,... """

    # Schema-qualified name and XSD source location recorded by the PyXB generator.
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'TransformationMatrix3x4Type')
    _XSDLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 317, 1)
    _Documentation = 'Used for texture parameterization. The Transformation matrix is a 3 by 4 matrix, thus it must be a list\n\t\t\t\twith 12 items. The order the matrix element are represented is row-major, i. e. the first 4 elements represent the first\n\t\t\t\trow, the fifth to the eight element the second row,... '

    # Each list item is an xs:double.
    _ItemType = pyxb.binding.datatypes.double
# Length facet (exactly 12 items = 3x4, row-major) attached after the class body.
TransformationMatrix3x4Type._CF_length = pyxb.binding.facets.CF_length(value=pyxb.binding.datatypes.nonNegativeInteger(12))
TransformationMatrix3x4Type._InitializeFacetMap(TransformationMatrix3x4Type._CF_length)
# Register the binding in the namespace so lookups by schema name resolve to it.
Namespace.addCategoryObject('typeBinding', 'TransformationMatrix3x4Type', TransformationMatrix3x4Type)
_module_typeBindings.TransformationMatrix3x4Type = TransformationMatrix3x4Type
# Atomic simple type: {http://www.opengis.net/citygml/2.0}integerBetween0and4
class integerBetween0and4 (pyxb.binding.datatypes.integer):

    """Type for integer values, which are greater or equal than 0 and less or equal than 4. Used for encoding of
    the LOD number. """

    # Schema-qualified name and XSD source location recorded by the PyXB generator.
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'integerBetween0and4')
    _XSDLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 328, 1)
    _Documentation = 'Type for integer values, which are greater or equal than 0 and less or equal than 4. Used for encoding of\n\t\t\t\tthe LOD number. '
# Range facets [0, 4] attached after the class body (facets must name the class
# as their value datatype).
integerBetween0and4._CF_maxInclusive = pyxb.binding.facets.CF_maxInclusive(value_datatype=integerBetween0and4, value=pyxb.binding.datatypes.integer(4))
integerBetween0and4._CF_minInclusive = pyxb.binding.facets.CF_minInclusive(value_datatype=integerBetween0and4, value=pyxb.binding.datatypes.integer(0))
integerBetween0and4._InitializeFacetMap(integerBetween0and4._CF_maxInclusive,
   integerBetween0and4._CF_minInclusive)
# Register the binding in the namespace so lookups by schema name resolve to it.
Namespace.addCategoryObject('typeBinding', 'integerBetween0and4', integerBetween0and4)
_module_typeBindings.integerBetween0and4 = integerBetween0and4
# List simple type: {http://www.opengis.net/citygml/2.0}doubleBetween0and1List
# superclasses pyxb.binding.datatypes.anySimpleType
class doubleBetween0and1List (pyxb.binding.basis.STD_list):

    """List for double values, which are greater or equal than 0 and less or equal than 1. Used for color
    encoding, for example. """

    # Schema-qualified name and XSD source location recorded by the PyXB generator.
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'doubleBetween0and1List')
    _XSDLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 287, 1)
    _Documentation = 'List for double values, which are greater or equal than 0 and less or equal than 1. Used for color\n\t\t\t\tencoding, for example. '

    # Items are constrained by the doubleBetween0and1 simple type defined above.
    _ItemType = doubleBetween0and1
# No additional facets on the list type itself; the per-item range comes from
# the item type.
doubleBetween0and1List._InitializeFacetMap()
# Register the binding in the namespace so lookups by schema name resolve to it.
Namespace.addCategoryObject('typeBinding', 'doubleBetween0and1List', doubleBetween0and1List)
_module_typeBindings.doubleBetween0and1List = doubleBetween0and1List
# Complex type {http://www.opengis.net/citygml/2.0}CityModelType with content type ELEMENT_ONLY
class CityModelType (teaser.data.bindings.opengis.raw.gml.AbstractFeatureCollectionType):

    """Type describing the "root" element of any city model file. It is a collection whose members are restricted
    to be features of a city model. All features are included as cityObjectMember. """

    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'CityModelType')
    _XSDLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 27, 1)
    # Copies of the base type's maps, extended below with this type's own element.
    _ElementMap = teaser.data.bindings.opengis.raw.gml.AbstractFeatureCollectionType._ElementMap.copy()
    _AttributeMap = teaser.data.bindings.opengis.raw.gml.AbstractFeatureCollectionType._AttributeMap.copy()
    # Base type is teaser.data.bindings.opengis.raw.gml.AbstractFeatureCollectionType

    # Element {http://www.opengis.net/citygml/2.0}_GenericApplicationPropertyOfCityModel uses Python identifier GenericApplicationPropertyOfCityModel
    __GenericApplicationPropertyOfCityModel = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, '_GenericApplicationPropertyOfCityModel'), 'GenericApplicationPropertyOfCityModel', '__httpwww_opengis_netcitygml2_0_CityModelType_httpwww_opengis_netcitygml2_0_GenericApplicationPropertyOfCityModel', True, pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 43, 1), )

    GenericApplicationPropertyOfCityModel = property(__GenericApplicationPropertyOfCityModel.value, __GenericApplicationPropertyOfCityModel.set, None, None)

    # Element boundedBy ({http://www.opengis.net/gml}boundedBy) inherited from {http://www.opengis.net/gml}AbstractFeatureType

    # Element featureMember ({http://www.opengis.net/gml}featureMember) inherited from {http://www.opengis.net/gml}AbstractFeatureCollectionType

    # Element featureMembers ({http://www.opengis.net/gml}featureMembers) inherited from {http://www.opengis.net/gml}AbstractFeatureCollectionType

    # Element location ({http://www.opengis.net/gml}location) inherited from {http://www.opengis.net/gml}AbstractFeatureType

    # Element metaDataProperty ({http://www.opengis.net/gml}metaDataProperty) inherited from {http://www.opengis.net/gml}AbstractGMLType

    # Element name ({http://www.opengis.net/gml}name) inherited from {http://www.opengis.net/gml}AbstractGMLType

    # Element description ({http://www.opengis.net/gml}description) inherited from {http://www.opengis.net/gml}AbstractGMLType

    # Attribute id inherited from {http://www.opengis.net/gml}AbstractGMLType
    _ElementMap.update({
        __GenericApplicationPropertyOfCityModel.name() : __GenericApplicationPropertyOfCityModel
    })
    _AttributeMap.update({

    })
_module_typeBindings.CityModelType = CityModelType
Namespace.addCategoryObject('typeBinding', 'CityModelType', CityModelType)
# Complex type {http://www.opengis.net/citygml/2.0}AbstractCityObjectType with content type ELEMENT_ONLY
class AbstractCityObjectType (teaser.data.bindings.opengis.raw.gml.AbstractFeatureType):

    """Type describing the abstract superclass of most CityGML features. Its purpose is to provide a creation and
    a termination date as well as a reference to corresponding objects in other information systems. A generalization relation
    may be used to relate features, which represent the same real-world object in different Levels-of-Detail, i.e. a feature
    and its generalized counterpart(s). The direction of this relation is from the feature to the corresponding generalized
    feature."""

    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    # Abstract: may not be instantiated directly; concrete CityGML feature types derive from it.
    _Abstract = True
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'AbstractCityObjectType')
    _XSDLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 51, 1)
    # Copies of the base type's maps, extended below with this type's own elements.
    _ElementMap = teaser.data.bindings.opengis.raw.gml.AbstractFeatureType._ElementMap.copy()
    _AttributeMap = teaser.data.bindings.opengis.raw.gml.AbstractFeatureType._AttributeMap.copy()
    # Base type is teaser.data.bindings.opengis.raw.gml.AbstractFeatureType

    # Element {http://www.opengis.net/citygml/2.0}creationDate uses Python identifier creationDate
    __creationDate = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'creationDate'), 'creationDate', '__httpwww_opengis_netcitygml2_0_AbstractCityObjectType_httpwww_opengis_netcitygml2_0creationDate', False, pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 62, 5), )

    creationDate = property(__creationDate.value, __creationDate.set, None, None)

    # Element {http://www.opengis.net/citygml/2.0}terminationDate uses Python identifier terminationDate
    __terminationDate = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'terminationDate'), 'terminationDate', '__httpwww_opengis_netcitygml2_0_AbstractCityObjectType_httpwww_opengis_netcitygml2_0terminationDate', False, pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 63, 5), )

    terminationDate = property(__terminationDate.value, __terminationDate.set, None, None)

    # Element {http://www.opengis.net/citygml/2.0}externalReference uses Python identifier externalReference
    __externalReference = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'externalReference'), 'externalReference', '__httpwww_opengis_netcitygml2_0_AbstractCityObjectType_httpwww_opengis_netcitygml2_0externalReference', True, pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 64, 5), )

    externalReference = property(__externalReference.value, __externalReference.set, None, None)

    # Element {http://www.opengis.net/citygml/2.0}generalizesTo uses Python identifier generalizesTo
    __generalizesTo = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'generalizesTo'), 'generalizesTo', '__httpwww_opengis_netcitygml2_0_AbstractCityObjectType_httpwww_opengis_netcitygml2_0generalizesTo', True, pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 65, 5), )

    generalizesTo = property(__generalizesTo.value, __generalizesTo.set, None, None)

    # Element {http://www.opengis.net/citygml/2.0}relativeToTerrain uses Python identifier relativeToTerrain
    __relativeToTerrain = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'relativeToTerrain'), 'relativeToTerrain', '__httpwww_opengis_netcitygml2_0_AbstractCityObjectType_httpwww_opengis_netcitygml2_0relativeToTerrain', False, pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 66, 5), )

    relativeToTerrain = property(__relativeToTerrain.value, __relativeToTerrain.set, None, None)

    # Element {http://www.opengis.net/citygml/2.0}relativeToWater uses Python identifier relativeToWater
    __relativeToWater = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'relativeToWater'), 'relativeToWater', '__httpwww_opengis_netcitygml2_0_AbstractCityObjectType_httpwww_opengis_netcitygml2_0relativeToWater', False, pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 67, 5), )

    relativeToWater = property(__relativeToWater.value, __relativeToWater.set, None, None)

    # Element {http://www.opengis.net/citygml/2.0}_GenericApplicationPropertyOfCityObject uses Python identifier GenericApplicationPropertyOfCityObject
    __GenericApplicationPropertyOfCityObject = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, '_GenericApplicationPropertyOfCityObject'), 'GenericApplicationPropertyOfCityObject', '__httpwww_opengis_netcitygml2_0_AbstractCityObjectType_httpwww_opengis_netcitygml2_0_GenericApplicationPropertyOfCityObject', True, pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 92, 1), )

    GenericApplicationPropertyOfCityObject = property(__GenericApplicationPropertyOfCityObject.value, __GenericApplicationPropertyOfCityObject.set, None, None)

    # Element boundedBy ({http://www.opengis.net/gml}boundedBy) inherited from {http://www.opengis.net/gml}AbstractFeatureType

    # Element location ({http://www.opengis.net/gml}location) inherited from {http://www.opengis.net/gml}AbstractFeatureType

    # Element metaDataProperty ({http://www.opengis.net/gml}metaDataProperty) inherited from {http://www.opengis.net/gml}AbstractGMLType

    # Element name ({http://www.opengis.net/gml}name) inherited from {http://www.opengis.net/gml}AbstractGMLType

    # Element description ({http://www.opengis.net/gml}description) inherited from {http://www.opengis.net/gml}AbstractGMLType

    # Attribute id inherited from {http://www.opengis.net/gml}AbstractGMLType
    _ElementMap.update({
        __creationDate.name() : __creationDate,
        __terminationDate.name() : __terminationDate,
        __externalReference.name() : __externalReference,
        __generalizesTo.name() : __generalizesTo,
        __relativeToTerrain.name() : __relativeToTerrain,
        __relativeToWater.name() : __relativeToWater,
        __GenericApplicationPropertyOfCityObject.name() : __GenericApplicationPropertyOfCityObject
    })
    _AttributeMap.update({

    })
_module_typeBindings.AbstractCityObjectType = AbstractCityObjectType
Namespace.addCategoryObject('typeBinding', 'AbstractCityObjectType', AbstractCityObjectType)
# Complex type {http://www.opengis.net/citygml/2.0}GeneralizationRelationType with content type ELEMENT_ONLY
class GeneralizationRelationType (pyxb.binding.basis.complexTypeDefinition):

    """Denotes the relation of a _CityObject to its corresponding _CityObject in higher LOD, i.e. to the
    _CityObjects representing the same real world object in higher LOD. The GeneralizationRelationType element must either
    carry a reference to a _CityObject object or contain a _CityObject object inline, but neither both nor none.
    """

    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'GeneralizationRelationType')
    _XSDLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 117, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType

    # Element {http://www.opengis.net/citygml/2.0}_CityObject uses Python identifier CityObject
    __CityObject = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, '_CityObject'), 'CityObject', '__httpwww_opengis_netcitygml2_0_GeneralizationRelationType_httpwww_opengis_netcitygml2_0_CityObject', False, pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 74, 1), )

    CityObject = property(__CityObject.value, __CityObject.set, None, None)

    # Attribute {http://www.opengis.net/gml}remoteSchema uses Python identifier remoteSchema
    __remoteSchema = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(_Namespace_gml, 'remoteSchema'), 'remoteSchema', '__httpwww_opengis_netcitygml2_0_GeneralizationRelationType_httpwww_opengis_netgmlremoteSchema', pyxb.binding.datatypes.anyURI)
    __remoteSchema._DeclarationLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/gml/3.1.1/base/gmlBase.xsd', 258, 1)
    __remoteSchema._UseLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/gml/3.1.1/base/gmlBase.xsd', 269, 2)

    remoteSchema = property(__remoteSchema.value, __remoteSchema.set, None, 'Reference to an XML Schema fragment that specifies the content model of the propertys value. This is in conformance with the XML Schema Section 4.14 Referencing Schemas from Elsewhere.')

    # Attribute {http://www.w3.org/1999/xlink}type uses Python identifier type
    # NOTE: fixed to 'simple' per the XLink schema.
    __type = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(_Namespace, 'type'), 'type', '__httpwww_opengis_netcitygml2_0_GeneralizationRelationType_httpwww_w3_org1999xlinktype', pyxb.bundles.common.xlink.typeType, fixed=True, unicode_default='simple')
    __type._DeclarationLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/common/schemas/xlink.xsd', 29, 1)
    __type._UseLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/common/schemas/xlink.xsd', 112, 2)

    type = property(__type.value, __type.set, None, None)

    # Attribute {http://www.w3.org/1999/xlink}href uses Python identifier href
    __href = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(_Namespace, 'href'), 'href', '__httpwww_opengis_netcitygml2_0_GeneralizationRelationType_httpwww_w3_org1999xlinkhref', pyxb.bundles.common.xlink.hrefType)
    __href._DeclarationLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/common/schemas/xlink.xsd', 42, 1)
    __href._UseLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/common/schemas/xlink.xsd', 113, 2)

    href = property(__href.value, __href.set, None, None)

    # Attribute {http://www.w3.org/1999/xlink}role uses Python identifier role
    __role = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(_Namespace, 'role'), 'role', '__httpwww_opengis_netcitygml2_0_GeneralizationRelationType_httpwww_w3_org1999xlinkrole', pyxb.bundles.common.xlink.roleType)
    __role._DeclarationLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/common/schemas/xlink.xsd', 48, 1)
    __role._UseLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/common/schemas/xlink.xsd', 114, 2)

    role = property(__role.value, __role.set, None, None)

    # Attribute {http://www.w3.org/1999/xlink}arcrole uses Python identifier arcrole
    __arcrole = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(_Namespace, 'arcrole'), 'arcrole', '__httpwww_opengis_netcitygml2_0_GeneralizationRelationType_httpwww_w3_org1999xlinkarcrole', pyxb.bundles.common.xlink.arcroleType)
    __arcrole._DeclarationLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/common/schemas/xlink.xsd', 56, 1)
    __arcrole._UseLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/common/schemas/xlink.xsd', 115, 2)

    arcrole = property(__arcrole.value, __arcrole.set, None, None)

    # Attribute {http://www.w3.org/1999/xlink}title uses Python identifier title
    __title = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(_Namespace, 'title'), 'title', '__httpwww_opengis_netcitygml2_0_GeneralizationRelationType_httpwww_w3_org1999xlinktitle', pyxb.bundles.common.xlink.titleAttrType)
    __title._DeclarationLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/common/schemas/xlink.xsd', 64, 1)
    __title._UseLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/common/schemas/xlink.xsd', 116, 2)

    title = property(__title.value, __title.set, None, None)

    # Attribute {http://www.w3.org/1999/xlink}show uses Python identifier show
    __show = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(_Namespace, 'show'), 'show', '__httpwww_opengis_netcitygml2_0_GeneralizationRelationType_httpwww_w3_org1999xlinkshow', pyxb.bundles.common.xlink.showType)
    __show._DeclarationLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/common/schemas/xlink.xsd', 70, 1)
    __show._UseLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/common/schemas/xlink.xsd', 117, 2)

    show = property(__show.value, __show.set, None, None)

    # Attribute {http://www.w3.org/1999/xlink}actuate uses Python identifier actuate
    __actuate = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(_Namespace, 'actuate'), 'actuate', '__httpwww_opengis_netcitygml2_0_GeneralizationRelationType_httpwww_w3_org1999xlinkactuate', pyxb.bundles.common.xlink.actuateType)
    __actuate._DeclarationLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/common/schemas/xlink.xsd', 82, 1)
    __actuate._UseLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/common/schemas/xlink.xsd', 118, 2)

    actuate = property(__actuate.value, __actuate.set, None, None)
    _ElementMap.update({
        __CityObject.name() : __CityObject
    })
    _AttributeMap.update({
        __remoteSchema.name() : __remoteSchema,
        __type.name() : __type,
        __href.name() : __href,
        __role.name() : __role,
        __arcrole.name() : __arcrole,
        __title.name() : __title,
        __show.name() : __show,
        __actuate.name() : __actuate
    })
_module_typeBindings.GeneralizationRelationType = GeneralizationRelationType
Namespace.addCategoryObject('typeBinding', 'GeneralizationRelationType', GeneralizationRelationType)
# Complex type {http://www.opengis.net/citygml/2.0}ExternalReferenceType with content type ELEMENT_ONLY
# NOTE(review): this looks like PyXB-generated binding code (pyxbgen output); edits
# here are normally lost on regeneration — confirm before hand-modifying.
class ExternalReferenceType (pyxb.binding.basis.complexTypeDefinition):
    """Type describing the reference to an corresponding object in an other information system, for example in
the german cadastre ALKIS, the german topographic information system or ATKIS, or the OS MasterMap. The reference consists
of the name of the external information system, represented by an URI, and the reference of the external object, given
either by a string or by an URI. If the informationSystem element is missing in the ExternalReference, the
ExternalObjectReference must be an URI, which contains an indication of the informationSystem."""
    # Binding metadata consumed by pyxb: element-only content model, concrete
    # (non-abstract) type, no underlying simple type.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'ExternalReferenceType')
    # Position of this type's definition in the source XSD (file, line, column).
    _XSDLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 132, 1)
    # Maps from expanded element/attribute names to their declarations; filled
    # below and used by pyxb to validate and (de)serialize instances.
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.opengis.net/citygml/2.0}informationSystem uses Python identifier informationSystem
    __informationSystem = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'informationSystem'), 'informationSystem', '__httpwww_opengis_netcitygml2_0_ExternalReferenceType_httpwww_opengis_netcitygml2_0informationSystem', False, pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 141, 3), )
    informationSystem = property(__informationSystem.value, __informationSystem.set, None, None)
    # Element {http://www.opengis.net/citygml/2.0}externalObject uses Python identifier externalObject
    __externalObject = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'externalObject'), 'externalObject', '__httpwww_opengis_netcitygml2_0_ExternalReferenceType_httpwww_opengis_netcitygml2_0externalObject', False, pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 142, 3), )
    externalObject = property(__externalObject.value, __externalObject.set, None, None)
    # Register the element declarations; this type has no attributes of its own.
    _ElementMap.update({
        __informationSystem.name() : __informationSystem,
        __externalObject.name() : __externalObject
    })
    _AttributeMap.update({
    })
# Expose the binding at module level and register it with the namespace so
# pyxb can resolve the schema type name back to this class.
_module_typeBindings.ExternalReferenceType = ExternalReferenceType
Namespace.addCategoryObject('typeBinding', 'ExternalReferenceType', ExternalReferenceType)
# Complex type {http://www.opengis.net/citygml/2.0}ExternalObjectReferenceType with content type ELEMENT_ONLY
# NOTE(review): PyXB-generated binding; regeneration overwrites manual edits.
class ExternalObjectReferenceType (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {http://www.opengis.net/citygml/2.0}ExternalObjectReferenceType with content type ELEMENT_ONLY"""
    # Binding metadata: element-only content, concrete type, no simple type.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'ExternalObjectReferenceType')
    # Source position of the type definition in the XSD.
    _XSDLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 146, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.opengis.net/citygml/2.0}name uses Python identifier name
    __name = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'name'), 'name', '__httpwww_opengis_netcitygml2_0_ExternalObjectReferenceType_httpwww_opengis_netcitygml2_0name', False, pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 148, 3), )
    name = property(__name.value, __name.set, None, None)
    # Element {http://www.opengis.net/citygml/2.0}uri uses Python identifier uri
    __uri = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'uri'), 'uri', '__httpwww_opengis_netcitygml2_0_ExternalObjectReferenceType_httpwww_opengis_netcitygml2_0uri', False, pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 149, 3), )
    uri = property(__uri.value, __uri.set, None, None)
    # Register the two child-element declarations; no attributes on this type.
    _ElementMap.update({
        __name.name() : __name,
        __uri.name() : __uri
    })
    _AttributeMap.update({
    })
# Module-level exposure and namespace registration of the binding class.
_module_typeBindings.ExternalObjectReferenceType = ExternalObjectReferenceType
Namespace.addCategoryObject('typeBinding', 'ExternalObjectReferenceType', ExternalObjectReferenceType)
# Complex type {http://www.opengis.net/citygml/2.0}AddressPropertyType with content type ELEMENT_ONLY
# NOTE(review): PyXB-generated binding; regeneration overwrites manual edits.
class AddressPropertyType (pyxb.binding.basis.complexTypeDefinition):
    """Denotes the relation of an _CityObject to its addresses. The AddressPropertyType element must either carry
a reference to an Address object or contain an Address object inline, but neither both nor none. """
    # Binding metadata: element-only content, concrete type, no simple type.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'AddressPropertyType')
    _XSDLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 189, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.opengis.net/citygml/2.0}Address uses Python identifier Address
    __Address = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'Address'), 'Address', '__httpwww_opengis_netcitygml2_0_AddressPropertyType_httpwww_opengis_netcitygml2_0Address', False, pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 217, 1), )
    Address = property(__Address.value, __Address.set, None, None)
    # The attribute group below (gml:remoteSchema plus the seven xlink:*
    # attributes) is the standard GML property-type attribute set; each
    # AttributeUse records where the attribute was declared and where it is
    # used in the source schemas.
    # Attribute {http://www.opengis.net/gml}remoteSchema uses Python identifier remoteSchema
    __remoteSchema = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(_Namespace_gml, 'remoteSchema'), 'remoteSchema', '__httpwww_opengis_netcitygml2_0_AddressPropertyType_httpwww_opengis_netgmlremoteSchema', pyxb.binding.datatypes.anyURI)
    __remoteSchema._DeclarationLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/gml/3.1.1/base/gmlBase.xsd', 258, 1)
    __remoteSchema._UseLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/gml/3.1.1/base/gmlBase.xsd', 269, 2)
    remoteSchema = property(__remoteSchema.value, __remoteSchema.set, None, 'Reference to an XML Schema fragment that specifies the content model of the propertys value. This is in conformance with the XML Schema Section 4.14 Referencing Schemas from Elsewhere.')
    # Attribute {http://www.w3.org/1999/xlink}type uses Python identifier type
    __type = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(_Namespace, 'type'), 'type', '__httpwww_opengis_netcitygml2_0_AddressPropertyType_httpwww_w3_org1999xlinktype', pyxb.bundles.common.xlink.typeType, fixed=True, unicode_default='simple')
    __type._DeclarationLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/common/schemas/xlink.xsd', 29, 1)
    __type._UseLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/common/schemas/xlink.xsd', 112, 2)
    type = property(__type.value, __type.set, None, None)
    # Attribute {http://www.w3.org/1999/xlink}href uses Python identifier href
    __href = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(_Namespace, 'href'), 'href', '__httpwww_opengis_netcitygml2_0_AddressPropertyType_httpwww_w3_org1999xlinkhref', pyxb.bundles.common.xlink.hrefType)
    __href._DeclarationLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/common/schemas/xlink.xsd', 42, 1)
    __href._UseLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/common/schemas/xlink.xsd', 113, 2)
    href = property(__href.value, __href.set, None, None)
    # Attribute {http://www.w3.org/1999/xlink}role uses Python identifier role
    __role = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(_Namespace, 'role'), 'role', '__httpwww_opengis_netcitygml2_0_AddressPropertyType_httpwww_w3_org1999xlinkrole', pyxb.bundles.common.xlink.roleType)
    __role._DeclarationLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/common/schemas/xlink.xsd', 48, 1)
    __role._UseLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/common/schemas/xlink.xsd', 114, 2)
    role = property(__role.value, __role.set, None, None)
    # Attribute {http://www.w3.org/1999/xlink}arcrole uses Python identifier arcrole
    __arcrole = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(_Namespace, 'arcrole'), 'arcrole', '__httpwww_opengis_netcitygml2_0_AddressPropertyType_httpwww_w3_org1999xlinkarcrole', pyxb.bundles.common.xlink.arcroleType)
    __arcrole._DeclarationLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/common/schemas/xlink.xsd', 56, 1)
    __arcrole._UseLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/common/schemas/xlink.xsd', 115, 2)
    arcrole = property(__arcrole.value, __arcrole.set, None, None)
    # Attribute {http://www.w3.org/1999/xlink}title uses Python identifier title
    __title = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(_Namespace, 'title'), 'title', '__httpwww_opengis_netcitygml2_0_AddressPropertyType_httpwww_w3_org1999xlinktitle', pyxb.bundles.common.xlink.titleAttrType)
    __title._DeclarationLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/common/schemas/xlink.xsd', 64, 1)
    __title._UseLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/common/schemas/xlink.xsd', 116, 2)
    title = property(__title.value, __title.set, None, None)
    # Attribute {http://www.w3.org/1999/xlink}show uses Python identifier show
    __show = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(_Namespace, 'show'), 'show', '__httpwww_opengis_netcitygml2_0_AddressPropertyType_httpwww_w3_org1999xlinkshow', pyxb.bundles.common.xlink.showType)
    __show._DeclarationLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/common/schemas/xlink.xsd', 70, 1)
    __show._UseLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/common/schemas/xlink.xsd', 117, 2)
    show = property(__show.value, __show.set, None, None)
    # Attribute {http://www.w3.org/1999/xlink}actuate uses Python identifier actuate
    __actuate = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(_Namespace, 'actuate'), 'actuate', '__httpwww_opengis_netcitygml2_0_AddressPropertyType_httpwww_w3_org1999xlinkactuate', pyxb.bundles.common.xlink.actuateType)
    __actuate._DeclarationLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/common/schemas/xlink.xsd', 82, 1)
    __actuate._UseLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/common/schemas/xlink.xsd', 118, 2)
    actuate = property(__actuate.value, __actuate.set, None, None)
    # Register the single element and the eight attribute uses with pyxb.
    _ElementMap.update({
        __Address.name() : __Address
    })
    _AttributeMap.update({
        __remoteSchema.name() : __remoteSchema,
        __type.name() : __type,
        __href.name() : __href,
        __role.name() : __role,
        __arcrole.name() : __arcrole,
        __title.name() : __title,
        __show.name() : __show,
        __actuate.name() : __actuate
    })
# Module-level exposure and namespace registration of the binding class.
_module_typeBindings.AddressPropertyType = AddressPropertyType
Namespace.addCategoryObject('typeBinding', 'AddressPropertyType', AddressPropertyType)
# Complex type {http://www.opengis.net/citygml/2.0}AddressType with content type ELEMENT_ONLY
# NOTE(review): PyXB-generated binding; regeneration overwrites manual edits.
class AddressType (teaser.data.bindings.opengis.raw.gml.AbstractFeatureType):
    """Type for addresses. It references the xAL address standard issued by the OASIS consortium. Please note,
that addresses are modelled as GML features. Every address can be assigned zero or more 2D or 3D point geometries (one
gml:MultiPoint geometry) locating the entrance(s). """
    # Binding metadata: element-only content, concrete type derived from
    # gml:AbstractFeatureType.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'AddressType')
    _XSDLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 200, 1)
    # Copy the base type's maps so inherited elements/attributes are kept and
    # this class's additions do not mutate the parent's maps.
    _ElementMap = teaser.data.bindings.opengis.raw.gml.AbstractFeatureType._ElementMap.copy()
    _AttributeMap = teaser.data.bindings.opengis.raw.gml.AbstractFeatureType._AttributeMap.copy()
    # Base type is teaser.data.bindings.opengis.raw.gml.AbstractFeatureType
    # Element {http://www.opengis.net/citygml/2.0}xalAddress uses Python identifier xalAddress
    __xalAddress = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'xalAddress'), 'xalAddress', '__httpwww_opengis_netcitygml2_0_AddressType_httpwww_opengis_netcitygml2_0xalAddress', False, pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 209, 5), )
    xalAddress = property(__xalAddress.value, __xalAddress.set, None, None)
    # Element {http://www.opengis.net/citygml/2.0}multiPoint uses Python identifier multiPoint
    __multiPoint = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'multiPoint'), 'multiPoint', '__httpwww_opengis_netcitygml2_0_AddressType_httpwww_opengis_netcitygml2_0multiPoint', False, pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 210, 5), )
    multiPoint = property(__multiPoint.value, __multiPoint.set, None, None)
    # Element {http://www.opengis.net/citygml/2.0}_GenericApplicationPropertyOfAddress uses Python identifier GenericApplicationPropertyOfAddress
    __GenericApplicationPropertyOfAddress = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, '_GenericApplicationPropertyOfAddress'), 'GenericApplicationPropertyOfAddress', '__httpwww_opengis_netcitygml2_0_AddressType_httpwww_opengis_netcitygml2_0_GenericApplicationPropertyOfAddress', True, pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 219, 1), )
    GenericApplicationPropertyOfAddress = property(__GenericApplicationPropertyOfAddress.value, __GenericApplicationPropertyOfAddress.set, None, None)
    # Element boundedBy ({http://www.opengis.net/gml}boundedBy) inherited from {http://www.opengis.net/gml}AbstractFeatureType
    # Element location ({http://www.opengis.net/gml}location) inherited from {http://www.opengis.net/gml}AbstractFeatureType
    # Element metaDataProperty ({http://www.opengis.net/gml}metaDataProperty) inherited from {http://www.opengis.net/gml}AbstractGMLType
    # Element name ({http://www.opengis.net/gml}name) inherited from {http://www.opengis.net/gml}AbstractGMLType
    # Element description ({http://www.opengis.net/gml}description) inherited from {http://www.opengis.net/gml}AbstractGMLType
    # Attribute id inherited from {http://www.opengis.net/gml}AbstractGMLType
    # Register only this type's own element declarations; inherited entries are
    # already present via the .copy() above.
    _ElementMap.update({
        __xalAddress.name() : __xalAddress,
        __multiPoint.name() : __multiPoint,
        __GenericApplicationPropertyOfAddress.name() : __GenericApplicationPropertyOfAddress
    })
    _AttributeMap.update({
    })
# Module-level exposure and namespace registration of the binding class.
_module_typeBindings.AddressType = AddressType
Namespace.addCategoryObject('typeBinding', 'AddressType', AddressType)
# Complex type {http://www.opengis.net/citygml/2.0}xalAddressPropertyType with content type ELEMENT_ONLY
# NOTE(review): PyXB-generated binding; regeneration overwrites manual edits.
class xalAddressPropertyType (pyxb.binding.basis.complexTypeDefinition):
    """Denotes the relation of an Address feature to the xAL address element."""
    # Binding metadata: element-only content, concrete type, no simple type.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'xalAddressPropertyType')
    _XSDLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 221, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {urn:oasis:names:tc:ciq:xsdschema:xAL:2.0}AddressDetails uses Python identifier AddressDetails
    __AddressDetails = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(_Namespace_xAL, 'AddressDetails'), 'AddressDetails', '__httpwww_opengis_netcitygml2_0_xalAddressPropertyType_urnoasisnamestcciqxsdschemaxAL2_0AddressDetails', False, pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/xAL/xAL.xsd', 44, 1), )
    AddressDetails = property(__AddressDetails.value, __AddressDetails.set, None, 'This container defines the details of the address. Can define multiple addresses including tracking address history')
    # Register the single xAL child element; no attributes on this type.
    _ElementMap.update({
        __AddressDetails.name() : __AddressDetails
    })
    _AttributeMap.update({
    })
# Module-level exposure and namespace registration of the binding class.
_module_typeBindings.xalAddressPropertyType = xalAddressPropertyType
Namespace.addCategoryObject('typeBinding', 'xalAddressPropertyType', xalAddressPropertyType)
# Complex type {http://www.opengis.net/citygml/2.0}ImplicitGeometryType with content type ELEMENT_ONLY
# NOTE(review): PyXB-generated binding; regeneration overwrites manual edits.
class ImplicitGeometryType (teaser.data.bindings.opengis.raw.gml.AbstractGMLType):
    """ Type for the implicit representation of a geometry. An implicit geometry is a geometric object, where the
shape is stored only once as a prototypical geometry, e.g. a tree or other vegetation object, a traffic light or a traffic
sign. This prototypic geometry object is re-used or referenced many times, wherever the corresponding feature occurs in
the 3D city model. Each occurrence is represented by a link to the prototypic shape geometry (in a local cartesian
coordinate system), by a transforma-tion matrix that is multiplied with each 3D coordinate tuple of the prototype, and by
an anchor point denoting the base point of the object in the world coordinate reference system. In order to determine the
absolute coordinates of an implicit geometry, the anchor point coordinates have to be added to the matrix multiplication
results. The transformation matrix accounts for the intended rotation, scaling, and local translation of the prototype. It
is a 4x4 matrix that is multiplied with the prototype coordinates using homogeneous coordinates, i.e. (x,y,z,1). This way
even a projection might be modelled by the transformation matrix. The concept of implicit geometries is an enhancement of
the geometry model of GML3. """
    # Binding metadata: element-only content, concrete type derived from
    # gml:AbstractGMLType.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'ImplicitGeometryType')
    _XSDLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 232, 1)
    # Copy the base type's maps so inherited entries are preserved without
    # mutating the parent class's maps.
    _ElementMap = teaser.data.bindings.opengis.raw.gml.AbstractGMLType._ElementMap.copy()
    _AttributeMap = teaser.data.bindings.opengis.raw.gml.AbstractGMLType._AttributeMap.copy()
    # Base type is teaser.data.bindings.opengis.raw.gml.AbstractGMLType
    # Element {http://www.opengis.net/citygml/2.0}mimeType uses Python identifier mimeType
    __mimeType = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'mimeType'), 'mimeType', '__httpwww_opengis_netcitygml2_0_ImplicitGeometryType_httpwww_opengis_netcitygml2_0mimeType', False, pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 249, 5), )
    mimeType = property(__mimeType.value, __mimeType.set, None, None)
    # Element {http://www.opengis.net/citygml/2.0}transformationMatrix uses Python identifier transformationMatrix
    __transformationMatrix = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'transformationMatrix'), 'transformationMatrix', '__httpwww_opengis_netcitygml2_0_ImplicitGeometryType_httpwww_opengis_netcitygml2_0transformationMatrix', False, pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 250, 5), )
    transformationMatrix = property(__transformationMatrix.value, __transformationMatrix.set, None, None)
    # Element {http://www.opengis.net/citygml/2.0}libraryObject uses Python identifier libraryObject
    __libraryObject = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'libraryObject'), 'libraryObject', '__httpwww_opengis_netcitygml2_0_ImplicitGeometryType_httpwww_opengis_netcitygml2_0libraryObject', False, pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 251, 5), )
    libraryObject = property(__libraryObject.value, __libraryObject.set, None, None)
    # Element {http://www.opengis.net/citygml/2.0}relativeGMLGeometry uses Python identifier relativeGMLGeometry
    __relativeGMLGeometry = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'relativeGMLGeometry'), 'relativeGMLGeometry', '__httpwww_opengis_netcitygml2_0_ImplicitGeometryType_httpwww_opengis_netcitygml2_0relativeGMLGeometry', False, pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 252, 5), )
    relativeGMLGeometry = property(__relativeGMLGeometry.value, __relativeGMLGeometry.set, None, None)
    # Element {http://www.opengis.net/citygml/2.0}referencePoint uses Python identifier referencePoint
    __referencePoint = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'referencePoint'), 'referencePoint', '__httpwww_opengis_netcitygml2_0_ImplicitGeometryType_httpwww_opengis_netcitygml2_0referencePoint', False, pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 253, 5), )
    referencePoint = property(__referencePoint.value, __referencePoint.set, None, None)
    # Element metaDataProperty ({http://www.opengis.net/gml}metaDataProperty) inherited from {http://www.opengis.net/gml}AbstractGMLType
    # Element name ({http://www.opengis.net/gml}name) inherited from {http://www.opengis.net/gml}AbstractGMLType
    # Element description ({http://www.opengis.net/gml}description) inherited from {http://www.opengis.net/gml}AbstractGMLType
    # Attribute id inherited from {http://www.opengis.net/gml}AbstractGMLType
    # Register this type's own element declarations; inherited entries come
    # from the .copy() above. No attributes of its own.
    _ElementMap.update({
        __mimeType.name() : __mimeType,
        __transformationMatrix.name() : __transformationMatrix,
        __libraryObject.name() : __libraryObject,
        __relativeGMLGeometry.name() : __relativeGMLGeometry,
        __referencePoint.name() : __referencePoint
    })
    _AttributeMap.update({
    })
# Module-level exposure and namespace registration of the binding class.
_module_typeBindings.ImplicitGeometryType = ImplicitGeometryType
Namespace.addCategoryObject('typeBinding', 'ImplicitGeometryType', ImplicitGeometryType)
# Complex type {http://www.opengis.net/citygml/2.0}ImplicitRepresentationPropertyType with content type ELEMENT_ONLY
# NOTE(review): PyXB-generated binding; regeneration overwrites manual edits.
class ImplicitRepresentationPropertyType (pyxb.binding.basis.complexTypeDefinition):
    """Denotes the relation of a _CityObject to its implicit geometry representation, which is a representation
of a geometry by referencing a prototype and transforming it to its real position in space. The
ImplicitRepresentationPropertyType element must either carry a reference to a ImplicitGeometry object or contain a
ImplicitGeometry object inline, but neither both nor none. """
    # Binding metadata: element-only content, concrete type, no simple type.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'ImplicitRepresentationPropertyType')
    _XSDLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 261, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.opengis.net/citygml/2.0}ImplicitGeometry uses Python identifier ImplicitGeometry
    __ImplicitGeometry = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'ImplicitGeometry'), 'ImplicitGeometry', '__httpwww_opengis_netcitygml2_0_ImplicitRepresentationPropertyType_httpwww_opengis_netcitygml2_0ImplicitGeometry', False, pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 259, 1), )
    ImplicitGeometry = property(__ImplicitGeometry.value, __ImplicitGeometry.set, None, None)
    # The attribute group below (gml:remoteSchema plus the seven xlink:*
    # attributes) is the standard GML property-type attribute set; each
    # AttributeUse records where the attribute was declared and where it is
    # used in the source schemas.
    # Attribute {http://www.opengis.net/gml}remoteSchema uses Python identifier remoteSchema
    __remoteSchema = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(_Namespace_gml, 'remoteSchema'), 'remoteSchema', '__httpwww_opengis_netcitygml2_0_ImplicitRepresentationPropertyType_httpwww_opengis_netgmlremoteSchema', pyxb.binding.datatypes.anyURI)
    __remoteSchema._DeclarationLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/gml/3.1.1/base/gmlBase.xsd', 258, 1)
    __remoteSchema._UseLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/gml/3.1.1/base/gmlBase.xsd', 269, 2)
    remoteSchema = property(__remoteSchema.value, __remoteSchema.set, None, 'Reference to an XML Schema fragment that specifies the content model of the propertys value. This is in conformance with the XML Schema Section 4.14 Referencing Schemas from Elsewhere.')
    # Attribute {http://www.w3.org/1999/xlink}type uses Python identifier type
    __type = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(_Namespace, 'type'), 'type', '__httpwww_opengis_netcitygml2_0_ImplicitRepresentationPropertyType_httpwww_w3_org1999xlinktype', pyxb.bundles.common.xlink.typeType, fixed=True, unicode_default='simple')
    __type._DeclarationLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/common/schemas/xlink.xsd', 29, 1)
    __type._UseLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/common/schemas/xlink.xsd', 112, 2)
    type = property(__type.value, __type.set, None, None)
    # Attribute {http://www.w3.org/1999/xlink}href uses Python identifier href
    __href = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(_Namespace, 'href'), 'href', '__httpwww_opengis_netcitygml2_0_ImplicitRepresentationPropertyType_httpwww_w3_org1999xlinkhref', pyxb.bundles.common.xlink.hrefType)
    __href._DeclarationLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/common/schemas/xlink.xsd', 42, 1)
    __href._UseLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/common/schemas/xlink.xsd', 113, 2)
    href = property(__href.value, __href.set, None, None)
    # Attribute {http://www.w3.org/1999/xlink}role uses Python identifier role
    __role = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(_Namespace, 'role'), 'role', '__httpwww_opengis_netcitygml2_0_ImplicitRepresentationPropertyType_httpwww_w3_org1999xlinkrole', pyxb.bundles.common.xlink.roleType)
    __role._DeclarationLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/common/schemas/xlink.xsd', 48, 1)
    __role._UseLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/common/schemas/xlink.xsd', 114, 2)
    role = property(__role.value, __role.set, None, None)
    # Attribute {http://www.w3.org/1999/xlink}arcrole uses Python identifier arcrole
    __arcrole = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(_Namespace, 'arcrole'), 'arcrole', '__httpwww_opengis_netcitygml2_0_ImplicitRepresentationPropertyType_httpwww_w3_org1999xlinkarcrole', pyxb.bundles.common.xlink.arcroleType)
    __arcrole._DeclarationLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/common/schemas/xlink.xsd', 56, 1)
    __arcrole._UseLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/common/schemas/xlink.xsd', 115, 2)
    arcrole = property(__arcrole.value, __arcrole.set, None, None)
    # Attribute {http://www.w3.org/1999/xlink}title uses Python identifier title
    __title = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(_Namespace, 'title'), 'title', '__httpwww_opengis_netcitygml2_0_ImplicitRepresentationPropertyType_httpwww_w3_org1999xlinktitle', pyxb.bundles.common.xlink.titleAttrType)
    __title._DeclarationLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/common/schemas/xlink.xsd', 64, 1)
    __title._UseLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/common/schemas/xlink.xsd', 116, 2)
    title = property(__title.value, __title.set, None, None)
    # Attribute {http://www.w3.org/1999/xlink}show uses Python identifier show
    __show = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(_Namespace, 'show'), 'show', '__httpwww_opengis_netcitygml2_0_ImplicitRepresentationPropertyType_httpwww_w3_org1999xlinkshow', pyxb.bundles.common.xlink.showType)
    __show._DeclarationLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/common/schemas/xlink.xsd', 70, 1)
    __show._UseLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/common/schemas/xlink.xsd', 117, 2)
    show = property(__show.value, __show.set, None, None)
    # Attribute {http://www.w3.org/1999/xlink}actuate uses Python identifier actuate
    __actuate = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(_Namespace, 'actuate'), 'actuate', '__httpwww_opengis_netcitygml2_0_ImplicitRepresentationPropertyType_httpwww_w3_org1999xlinkactuate', pyxb.bundles.common.xlink.actuateType)
    __actuate._DeclarationLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/common/schemas/xlink.xsd', 82, 1)
    __actuate._UseLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/common/schemas/xlink.xsd', 118, 2)
    actuate = property(__actuate.value, __actuate.set, None, None)
    # Register the single element and the eight attribute uses with pyxb.
    _ElementMap.update({
        __ImplicitGeometry.name() : __ImplicitGeometry
    })
    _AttributeMap.update({
        __remoteSchema.name() : __remoteSchema,
        __type.name() : __type,
        __href.name() : __href,
        __role.name() : __role,
        __arcrole.name() : __arcrole,
        __title.name() : __title,
        __show.name() : __show,
        __actuate.name() : __actuate
    })
# Module-level exposure and namespace registration of the binding class.
_module_typeBindings.ImplicitRepresentationPropertyType = ImplicitRepresentationPropertyType
Namespace.addCategoryObject('typeBinding', 'ImplicitRepresentationPropertyType', ImplicitRepresentationPropertyType)
# Complex type {http://www.opengis.net/citygml/2.0}AbstractSiteType with content type ELEMENT_ONLY
class AbstractSiteType (AbstractCityObjectType):
    """Type describing the abstract superclass for buildings, facilities, etc. Future extensions of CityGML like
    bridges and tunnels would be modelled as subclasses of _Site. As subclass of _CityObject, a _Site inherits all attributes
    and relations, in particular an id, names, external references, and generalization relations. """
    # No simple-type content: this binding is defined purely by its complex content model.
    _TypeDefinition = None
    # Element-only content: no character data is permitted between child elements.
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    # The schema marks the type abstract; only concrete subclasses appear in documents.
    _Abstract = True
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'AbstractSiteType')
    # Position of the type declaration within the originating XSD file.
    _XSDLocation = pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 96, 1)
    # Copy (not alias) the parent's maps so local additions do not mutate the base class.
    _ElementMap = AbstractCityObjectType._ElementMap.copy()
    _AttributeMap = AbstractCityObjectType._AttributeMap.copy()
    # Base type is AbstractCityObjectType
    # Element creationDate ({http://www.opengis.net/citygml/2.0}creationDate) inherited from {http://www.opengis.net/citygml/2.0}AbstractCityObjectType
    # Element terminationDate ({http://www.opengis.net/citygml/2.0}terminationDate) inherited from {http://www.opengis.net/citygml/2.0}AbstractCityObjectType
    # Element externalReference ({http://www.opengis.net/citygml/2.0}externalReference) inherited from {http://www.opengis.net/citygml/2.0}AbstractCityObjectType
    # Element generalizesTo ({http://www.opengis.net/citygml/2.0}generalizesTo) inherited from {http://www.opengis.net/citygml/2.0}AbstractCityObjectType
    # Element relativeToTerrain ({http://www.opengis.net/citygml/2.0}relativeToTerrain) inherited from {http://www.opengis.net/citygml/2.0}AbstractCityObjectType
    # Element relativeToWater ({http://www.opengis.net/citygml/2.0}relativeToWater) inherited from {http://www.opengis.net/citygml/2.0}AbstractCityObjectType
    # Element GenericApplicationPropertyOfCityObject ({http://www.opengis.net/citygml/2.0}_GenericApplicationPropertyOfCityObject) inherited from {http://www.opengis.net/citygml/2.0}AbstractCityObjectType
    # Element {http://www.opengis.net/citygml/2.0}_GenericApplicationPropertyOfSite uses Python identifier GenericApplicationPropertyOfSite
    # The only locally-declared element: the ADE (application domain extension) hook
    # for _Site. True = plural (may occur more than once).
    __GenericApplicationPropertyOfSite = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, '_GenericApplicationPropertyOfSite'), 'GenericApplicationPropertyOfSite', '__httpwww_opengis_netcitygml2_0_AbstractSiteType_httpwww_opengis_netcitygml2_0_GenericApplicationPropertyOfSite', True, pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 113, 1), )
    GenericApplicationPropertyOfSite = property(__GenericApplicationPropertyOfSite.value, __GenericApplicationPropertyOfSite.set, None, None)
    # Element boundedBy ({http://www.opengis.net/gml}boundedBy) inherited from {http://www.opengis.net/gml}AbstractFeatureType
    # Element location ({http://www.opengis.net/gml}location) inherited from {http://www.opengis.net/gml}AbstractFeatureType
    # Element metaDataProperty ({http://www.opengis.net/gml}metaDataProperty) inherited from {http://www.opengis.net/gml}AbstractGMLType
    # Element name ({http://www.opengis.net/gml}name) inherited from {http://www.opengis.net/gml}AbstractGMLType
    # Element description ({http://www.opengis.net/gml}description) inherited from {http://www.opengis.net/gml}AbstractGMLType
    # Attribute id inherited from {http://www.opengis.net/gml}AbstractGMLType
    _ElementMap.update({
        __GenericApplicationPropertyOfSite.name() : __GenericApplicationPropertyOfSite
    })
    # No attributes are declared locally on AbstractSiteType.
    _AttributeMap.update({
    })
# Register AbstractSiteType in the module registry and the namespace category map.
_module_typeBindings.AbstractSiteType = AbstractSiteType
Namespace.addCategoryObject('typeBinding', 'AbstractSiteType', AbstractSiteType)
# Module-level (global-scope) element declarations for this namespace.  Each
# abstract ADE hook element is bound to anyType; concrete elements are bound to
# their generated complex-type classes.  Every element is also registered in the
# namespace's 'elementBinding' category under its local name.
GenericApplicationPropertyOfCityModel = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, '_GenericApplicationPropertyOfCityModel'), pyxb.binding.datatypes.anyType, abstract=pyxb.binding.datatypes.boolean(1), location=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 43, 1))
Namespace.addCategoryObject('elementBinding', GenericApplicationPropertyOfCityModel.name().localName(), GenericApplicationPropertyOfCityModel)
# cityObjectMember reuses the GML FeaturePropertyType binding from another module.
cityObjectMember = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'cityObjectMember'), teaser.data.bindings.opengis.raw.gml.FeaturePropertyType, location=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 47, 1))
Namespace.addCategoryObject('elementBinding', cityObjectMember.name().localName(), cityObjectMember)
GenericApplicationPropertyOfCityObject = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, '_GenericApplicationPropertyOfCityObject'), pyxb.binding.datatypes.anyType, abstract=pyxb.binding.datatypes.boolean(1), location=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 92, 1))
Namespace.addCategoryObject('elementBinding', GenericApplicationPropertyOfCityObject.name().localName(), GenericApplicationPropertyOfCityObject)
GenericApplicationPropertyOfSite = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, '_GenericApplicationPropertyOfSite'), pyxb.binding.datatypes.anyType, abstract=pyxb.binding.datatypes.boolean(1), location=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 113, 1))
Namespace.addCategoryObject('elementBinding', GenericApplicationPropertyOfSite.name().localName(), GenericApplicationPropertyOfSite)
GenericApplicationPropertyOfAddress = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, '_GenericApplicationPropertyOfAddress'), pyxb.binding.datatypes.anyType, abstract=pyxb.binding.datatypes.boolean(1), location=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 219, 1))
Namespace.addCategoryObject('elementBinding', GenericApplicationPropertyOfAddress.name().localName(), GenericApplicationPropertyOfAddress)
CityModel = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'CityModel'), CityModelType, location=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 41, 1))
Namespace.addCategoryObject('elementBinding', CityModel.name().localName(), CityModel)
CityObject = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, '_CityObject'), AbstractCityObjectType, abstract=pyxb.binding.datatypes.boolean(1), location=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 74, 1))
Namespace.addCategoryObject('elementBinding', CityObject.name().localName(), CityObject)
Address = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'Address'), AddressType, location=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 217, 1))
Namespace.addCategoryObject('elementBinding', Address.name().localName(), Address)
ImplicitGeometry = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'ImplicitGeometry'), ImplicitGeometryType, location=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 259, 1))
Namespace.addCategoryObject('elementBinding', ImplicitGeometry.name().localName(), ImplicitGeometry)
Site = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, '_Site'), AbstractSiteType, abstract=pyxb.binding.datatypes.boolean(1), location=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 111, 1))
Namespace.addCategoryObject('elementBinding', Site.name().localName(), Site)
# Element declaration scoped to CityModelType (its ADE hook child).
CityModelType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, '_GenericApplicationPropertyOfCityModel'), pyxb.binding.datatypes.anyType, abstract=pyxb.binding.datatypes.boolean(1), scope=CityModelType, location=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 43, 1)))
def _BuildAutomaton ():
    """Build the FAC content-model automaton for CityModelType.

    Generated helper, invoked exactly once at import time to attach the
    validation automaton; it deletes itself from the module namespace so
    it cannot be called again.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton
    del _BuildAutomaton
    import pyxb.utils.fac as fac
    # One CounterCondition per occurrence-constrained particle in the
    # content model (min/max carry the XSD minOccurs/maxOccurs values;
    # max=None means unbounded).
    counters = set()
    cc_0 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/gml/3.1.1/base/gmlBase.xsd', 55, 3))
    counters.add(cc_0)
    cc_1 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/gml/3.1.1/base/gmlBase.xsd', 56, 3))
    counters.add(cc_1)
    cc_2 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/gml/3.1.1/base/gmlBase.xsd', 57, 3))
    counters.add(cc_2)
    cc_3 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/gml/3.1.1/base/feature.xsd', 28, 5))
    counters.add(cc_3)
    cc_4 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/gml/3.1.1/base/feature.xsd', 29, 5))
    counters.add(cc_4)
    cc_5 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/gml/3.1.1/base/feature.xsd', 108, 5))
    counters.add(cc_5)
    cc_6 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/gml/3.1.1/base/feature.xsd', 109, 5))
    counters.add(cc_6)
    cc_7 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 35, 5))
    counters.add(cc_7)
    # One State per element use, in document order:
    # st_0 metaDataProperty, st_1 description, st_2 name, st_3 boundedBy,
    # st_4 location, st_5 featureMember, st_6 featureMembers,
    # st_7 _GenericApplicationPropertyOfCityModel.
    # All states are initial because every particle is optional (min=0).
    states = []
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_0, False))
    symbol = pyxb.binding.content.ElementUse(CityModelType._UseForTag(pyxb.namespace.ExpandedName(_Namespace_gml, 'metaDataProperty')), pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/gml/3.1.1/base/gmlBase.xsd', 55, 3))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_1, False))
    symbol = pyxb.binding.content.ElementUse(CityModelType._UseForTag(pyxb.namespace.ExpandedName(_Namespace_gml, 'description')), pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/gml/3.1.1/base/gmlBase.xsd', 56, 3))
    st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_2, False))
    symbol = pyxb.binding.content.ElementUse(CityModelType._UseForTag(pyxb.namespace.ExpandedName(_Namespace_gml, 'name')), pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/gml/3.1.1/base/gmlBase.xsd', 57, 3))
    st_2 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_2)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_3, False))
    symbol = pyxb.binding.content.ElementUse(CityModelType._UseForTag(pyxb.namespace.ExpandedName(_Namespace_gml, 'boundedBy')), pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/gml/3.1.1/base/feature.xsd', 28, 5))
    st_3 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_3)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_4, False))
    symbol = pyxb.binding.content.ElementUse(CityModelType._UseForTag(pyxb.namespace.ExpandedName(_Namespace_gml, 'location')), pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/gml/3.1.1/base/feature.xsd', 29, 5))
    st_4 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_4)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_5, False))
    symbol = pyxb.binding.content.ElementUse(CityModelType._UseForTag(pyxb.namespace.ExpandedName(_Namespace_gml, 'featureMember')), pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/gml/3.1.1/base/feature.xsd', 108, 5))
    st_5 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_5)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_6, False))
    symbol = pyxb.binding.content.ElementUse(CityModelType._UseForTag(pyxb.namespace.ExpandedName(_Namespace_gml, 'featureMembers')), pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/gml/3.1.1/base/feature.xsd', 109, 5))
    st_6 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_6)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_7, False))
    symbol = pyxb.binding.content.ElementUse(CityModelType._UseForTag(pyxb.namespace.ExpandedName(Namespace, '_GenericApplicationPropertyOfCityModel')), pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 35, 5))
    st_7 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_7)
    # Transition table: each state may repeat itself (incrementing its own
    # counter) or advance to any later state (finalizing its counter) —
    # the standard FAC encoding of an ordered sequence of optional particles.
    transitions = []
    transitions.append(fac.Transition(st_0, [
        fac.UpdateInstruction(cc_0, True) ]))
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_0, False) ]))
    st_0._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_1, True) ]))
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_1, False) ]))
    st_1._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_2, False) ]))
    st_2._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_3, True) ]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_3, False) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_3, False) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_3, False) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_3, False) ]))
    st_3._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_4, True) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_4, False) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_4, False) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_4, False) ]))
    st_4._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_5, True) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_5, False) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_5, False) ]))
    st_5._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_6, True) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_6, False) ]))
    st_6._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_7, True) ]))
    st_7._set_transitionSet(transitions)
    # True: the automaton accepts the empty sequence (all particles optional).
    return fac.Automaton(states, counters, True, containing_state=None)
# Attach the content-model automaton built above (the helper deletes itself).
CityModelType._Automaton = _BuildAutomaton()
# Element declarations scoped to AbstractCityObjectType, in schema order.
AbstractCityObjectType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'creationDate'), pyxb.binding.datatypes.date, scope=AbstractCityObjectType, location=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 62, 5)))
AbstractCityObjectType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'terminationDate'), pyxb.binding.datatypes.date, scope=AbstractCityObjectType, location=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 63, 5)))
AbstractCityObjectType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'externalReference'), ExternalReferenceType, scope=AbstractCityObjectType, location=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 64, 5)))
AbstractCityObjectType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'generalizesTo'), GeneralizationRelationType, scope=AbstractCityObjectType, location=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 65, 5)))
AbstractCityObjectType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'relativeToTerrain'), RelativeToTerrainType, scope=AbstractCityObjectType, location=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 66, 5)))
AbstractCityObjectType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'relativeToWater'), RelativeToWaterType, scope=AbstractCityObjectType, location=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 67, 5)))
# Abstract ADE hook element for _CityObject extensions.
AbstractCityObjectType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, '_GenericApplicationPropertyOfCityObject'), pyxb.binding.datatypes.anyType, abstract=pyxb.binding.datatypes.boolean(1), scope=AbstractCityObjectType, location=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 92, 1)))
def _BuildAutomaton_ ():
    """Build the FAC content-model automaton for AbstractCityObjectType.

    Generated helper, invoked exactly once at import time to attach the
    validation automaton; it deletes itself from the module namespace so
    it cannot be called again.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_
    del _BuildAutomaton_
    import pyxb.utils.fac as fac
    # One CounterCondition per occurrence-constrained particle (min/max carry
    # the XSD minOccurs/maxOccurs; max=None means unbounded).
    counters = set()
    cc_0 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/gml/3.1.1/base/gmlBase.xsd', 55, 3))
    counters.add(cc_0)
    cc_1 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/gml/3.1.1/base/gmlBase.xsd', 56, 3))
    counters.add(cc_1)
    cc_2 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/gml/3.1.1/base/gmlBase.xsd', 57, 3))
    counters.add(cc_2)
    cc_3 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/gml/3.1.1/base/feature.xsd', 28, 5))
    counters.add(cc_3)
    cc_4 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/gml/3.1.1/base/feature.xsd', 29, 5))
    counters.add(cc_4)
    cc_5 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 62, 5))
    counters.add(cc_5)
    cc_6 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 63, 5))
    counters.add(cc_6)
    cc_7 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 64, 5))
    counters.add(cc_7)
    cc_8 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 65, 5))
    counters.add(cc_8)
    cc_9 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 66, 5))
    counters.add(cc_9)
    cc_10 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 67, 5))
    counters.add(cc_10)
    cc_11 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 68, 5))
    counters.add(cc_11)
    # One State per element use, in document order:
    # st_0 metaDataProperty, st_1 description, st_2 name, st_3 boundedBy,
    # st_4 location, st_5 creationDate, st_6 terminationDate,
    # st_7 externalReference, st_8 generalizesTo, st_9 relativeToTerrain,
    # st_10 relativeToWater, st_11 _GenericApplicationPropertyOfCityObject.
    # All states are initial because every particle is optional (min=0).
    states = []
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_0, False))
    symbol = pyxb.binding.content.ElementUse(AbstractCityObjectType._UseForTag(pyxb.namespace.ExpandedName(_Namespace_gml, 'metaDataProperty')), pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/gml/3.1.1/base/gmlBase.xsd', 55, 3))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_1, False))
    symbol = pyxb.binding.content.ElementUse(AbstractCityObjectType._UseForTag(pyxb.namespace.ExpandedName(_Namespace_gml, 'description')), pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/gml/3.1.1/base/gmlBase.xsd', 56, 3))
    st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_2, False))
    symbol = pyxb.binding.content.ElementUse(AbstractCityObjectType._UseForTag(pyxb.namespace.ExpandedName(_Namespace_gml, 'name')), pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/gml/3.1.1/base/gmlBase.xsd', 57, 3))
    st_2 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_2)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_3, False))
    symbol = pyxb.binding.content.ElementUse(AbstractCityObjectType._UseForTag(pyxb.namespace.ExpandedName(_Namespace_gml, 'boundedBy')), pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/gml/3.1.1/base/feature.xsd', 28, 5))
    st_3 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_3)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_4, False))
    symbol = pyxb.binding.content.ElementUse(AbstractCityObjectType._UseForTag(pyxb.namespace.ExpandedName(_Namespace_gml, 'location')), pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/gml/3.1.1/base/feature.xsd', 29, 5))
    st_4 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_4)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_5, False))
    symbol = pyxb.binding.content.ElementUse(AbstractCityObjectType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'creationDate')), pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 62, 5))
    st_5 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_5)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_6, False))
    symbol = pyxb.binding.content.ElementUse(AbstractCityObjectType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'terminationDate')), pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 63, 5))
    st_6 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_6)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_7, False))
    symbol = pyxb.binding.content.ElementUse(AbstractCityObjectType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'externalReference')), pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 64, 5))
    st_7 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_7)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_8, False))
    symbol = pyxb.binding.content.ElementUse(AbstractCityObjectType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'generalizesTo')), pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 65, 5))
    st_8 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_8)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_9, False))
    symbol = pyxb.binding.content.ElementUse(AbstractCityObjectType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'relativeToTerrain')), pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 66, 5))
    st_9 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_9)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_10, False))
    symbol = pyxb.binding.content.ElementUse(AbstractCityObjectType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'relativeToWater')), pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 67, 5))
    st_10 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_10)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_11, False))
    symbol = pyxb.binding.content.ElementUse(AbstractCityObjectType._UseForTag(pyxb.namespace.ExpandedName(Namespace, '_GenericApplicationPropertyOfCityObject')), pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 68, 5))
    st_11 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_11)
    # Transition table: each state may repeat itself (incrementing its own
    # counter) or advance to any later state (finalizing its counter) —
    # the standard FAC encoding of an ordered sequence of optional particles.
    transitions = []
    transitions.append(fac.Transition(st_0, [
        fac.UpdateInstruction(cc_0, True) ]))
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_8, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_9, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_10, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_11, [
        fac.UpdateInstruction(cc_0, False) ]))
    st_0._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_1, True) ]))
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_8, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_9, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_10, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_11, [
        fac.UpdateInstruction(cc_1, False) ]))
    st_1._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_8, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_9, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_10, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_11, [
        fac.UpdateInstruction(cc_2, False) ]))
    st_2._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_3, True) ]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_3, False) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_3, False) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_3, False) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_3, False) ]))
    transitions.append(fac.Transition(st_8, [
        fac.UpdateInstruction(cc_3, False) ]))
    transitions.append(fac.Transition(st_9, [
        fac.UpdateInstruction(cc_3, False) ]))
    transitions.append(fac.Transition(st_10, [
        fac.UpdateInstruction(cc_3, False) ]))
    transitions.append(fac.Transition(st_11, [
        fac.UpdateInstruction(cc_3, False) ]))
    st_3._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_4, True) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_4, False) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_4, False) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_4, False) ]))
    transitions.append(fac.Transition(st_8, [
        fac.UpdateInstruction(cc_4, False) ]))
    transitions.append(fac.Transition(st_9, [
        fac.UpdateInstruction(cc_4, False) ]))
    transitions.append(fac.Transition(st_10, [
        fac.UpdateInstruction(cc_4, False) ]))
    transitions.append(fac.Transition(st_11, [
        fac.UpdateInstruction(cc_4, False) ]))
    st_4._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_5, True) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_5, False) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_5, False) ]))
    transitions.append(fac.Transition(st_8, [
        fac.UpdateInstruction(cc_5, False) ]))
    transitions.append(fac.Transition(st_9, [
        fac.UpdateInstruction(cc_5, False) ]))
    transitions.append(fac.Transition(st_10, [
        fac.UpdateInstruction(cc_5, False) ]))
    transitions.append(fac.Transition(st_11, [
        fac.UpdateInstruction(cc_5, False) ]))
    st_5._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_6, True) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_6, False) ]))
    transitions.append(fac.Transition(st_8, [
        fac.UpdateInstruction(cc_6, False) ]))
    transitions.append(fac.Transition(st_9, [
        fac.UpdateInstruction(cc_6, False) ]))
    transitions.append(fac.Transition(st_10, [
        fac.UpdateInstruction(cc_6, False) ]))
    transitions.append(fac.Transition(st_11, [
        fac.UpdateInstruction(cc_6, False) ]))
    st_6._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_7, True) ]))
    transitions.append(fac.Transition(st_8, [
        fac.UpdateInstruction(cc_7, False) ]))
    transitions.append(fac.Transition(st_9, [
        fac.UpdateInstruction(cc_7, False) ]))
    transitions.append(fac.Transition(st_10, [
        fac.UpdateInstruction(cc_7, False) ]))
    transitions.append(fac.Transition(st_11, [
        fac.UpdateInstruction(cc_7, False) ]))
    st_7._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_8, [
        fac.UpdateInstruction(cc_8, True) ]))
    transitions.append(fac.Transition(st_9, [
        fac.UpdateInstruction(cc_8, False) ]))
    transitions.append(fac.Transition(st_10, [
        fac.UpdateInstruction(cc_8, False) ]))
    transitions.append(fac.Transition(st_11, [
        fac.UpdateInstruction(cc_8, False) ]))
    st_8._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_9, [
        fac.UpdateInstruction(cc_9, True) ]))
    transitions.append(fac.Transition(st_10, [
        fac.UpdateInstruction(cc_9, False) ]))
    transitions.append(fac.Transition(st_11, [
        fac.UpdateInstruction(cc_9, False) ]))
    st_9._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_10, [
        fac.UpdateInstruction(cc_10, True) ]))
    transitions.append(fac.Transition(st_11, [
        fac.UpdateInstruction(cc_10, False) ]))
    st_10._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_11, [
        fac.UpdateInstruction(cc_11, True) ]))
    st_11._set_transitionSet(transitions)
    # True: the automaton accepts the empty sequence (all particles optional).
    return fac.Automaton(states, counters, True, containing_state=None)
# Attach the content-model automaton built above (the helper deletes itself).
AbstractCityObjectType._Automaton = _BuildAutomaton_()
# Element declaration scoped to GeneralizationRelationType: the abstract
# _CityObject head element that concrete city objects substitute for.
GeneralizationRelationType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, '_CityObject'), AbstractCityObjectType, abstract=pyxb.binding.datatypes.boolean(1), scope=GeneralizationRelationType, location=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 74, 1)))
def _BuildAutomaton_2 ():
    # One-shot builder for the GeneralizationRelationType content model:
    # a single optional _CityObject element.  The helper removes itself
    # from the module namespace once invoked.
    global _BuildAutomaton_2
    del _BuildAutomaton_2
    import pyxb.utils.fac as fac
    xsd = '/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd'
    # Occurrence counter: the element may appear at most once.
    occurs = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location(xsd, 124, 2))
    element_use = pyxb.binding.content.ElementUse(GeneralizationRelationType._UseForTag(pyxb.namespace.ExpandedName(Namespace, '_CityObject')), pyxb.utils.utility.Location(xsd, 125, 3))
    # Single state, both initial and accepting (accepting resets the counter).
    sole = fac.State(element_use, is_initial=True,
                     final_update={fac.UpdateInstruction(occurs, False)},
                     is_unordered_catenation=False)
    # Self-loop while the counter still permits another occurrence.
    sole._set_transitionSet([fac.Transition(sole, [fac.UpdateInstruction(occurs, True)])])
    return fac.Automaton([sole], {occurs}, True, containing_state=None)
# Attach the automaton and declare ExternalReferenceType's two child elements.
GeneralizationRelationType._Automaton = _BuildAutomaton_2()
ExternalReferenceType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'informationSystem'), pyxb.binding.datatypes.anyURI, scope=ExternalReferenceType, location=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 141, 3)))
ExternalReferenceType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'externalObject'), ExternalObjectReferenceType, scope=ExternalReferenceType, location=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 142, 3)))
def _BuildAutomaton_3 ():
    # One-shot builder for the ExternalReferenceType content model: an
    # optional informationSystem followed by a mandatory externalObject.
    # The helper removes itself from the module namespace once invoked.
    global _BuildAutomaton_3
    del _BuildAutomaton_3
    import pyxb.utils.fac as fac
    xsd = '/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd'
    # informationSystem occurs at most once.
    cc_info = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location(xsd, 141, 3))
    sym_info = pyxb.binding.content.ElementUse(ExternalReferenceType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'informationSystem')), pyxb.utils.utility.Location(xsd, 141, 3))
    # Not accepting on its own (final_update=None): externalObject must follow.
    st_info = fac.State(sym_info, is_initial=True, final_update=None, is_unordered_catenation=False)
    sym_obj = pyxb.binding.content.ElementUse(ExternalReferenceType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'externalObject')), pyxb.utils.utility.Location(xsd, 142, 3))
    # The accepting state; also initial since informationSystem may be skipped.
    st_obj = fac.State(sym_obj, is_initial=True, final_update=set(), is_unordered_catenation=False)
    st_info._set_transitionSet([
        fac.Transition(st_info, [fac.UpdateInstruction(cc_info, True)]),
        fac.Transition(st_obj, [fac.UpdateInstruction(cc_info, False)]),
    ])
    st_obj._set_transitionSet([])
    return fac.Automaton([st_info, st_obj], {cc_info}, False, containing_state=None)
# Attach the automaton and declare ExternalObjectReferenceType's alternatives.
ExternalReferenceType._Automaton = _BuildAutomaton_3()
ExternalObjectReferenceType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'name'), pyxb.binding.datatypes.string, scope=ExternalObjectReferenceType, location=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 148, 3)))
ExternalObjectReferenceType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'uri'), pyxb.binding.datatypes.anyURI, scope=ExternalObjectReferenceType, location=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 149, 3)))
def _BuildAutomaton_4 ():
    # One-shot builder for the ExternalObjectReferenceType content model:
    # a choice between exactly one 'name' or one 'uri' element.  The
    # helper removes itself from the module namespace once invoked.
    global _BuildAutomaton_4
    del _BuildAutomaton_4
    import pyxb.utils.fac as fac
    xsd = '/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd'
    states = []
    # Each alternative is an initial, accepting state with no onward
    # transitions — matching one of them ends the content model.
    for tag, line in (('name', 148), ('uri', 149)):
        use = pyxb.binding.content.ElementUse(ExternalObjectReferenceType._UseForTag(pyxb.namespace.ExpandedName(Namespace, tag)), pyxb.utils.utility.Location(xsd, line, 3))
        st = fac.State(use, is_initial=True, final_update=set(), is_unordered_catenation=False)
        st._set_transitionSet([])
        states.append(st)
    return fac.Automaton(states, set(), False, containing_state=None)
# Attach the automaton and declare AddressPropertyType's Address child element.
ExternalObjectReferenceType._Automaton = _BuildAutomaton_4()
AddressPropertyType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'Address'), AddressType, scope=AddressPropertyType, location=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 217, 1)))
def _BuildAutomaton_5 ():
    # One-shot builder for the AddressPropertyType content model: a single
    # optional 'Address' element.  The helper removes itself from the
    # module namespace once invoked.
    global _BuildAutomaton_5
    del _BuildAutomaton_5
    import pyxb.utils.fac as fac
    xsd = '/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd'
    # Occurrence counter: the element may appear at most once.
    occurs = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location(xsd, 194, 2))
    element_use = pyxb.binding.content.ElementUse(AddressPropertyType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'Address')), pyxb.utils.utility.Location(xsd, 195, 3))
    # Single state, both initial and accepting (accepting resets the counter).
    sole = fac.State(element_use, is_initial=True,
                     final_update={fac.UpdateInstruction(occurs, False)},
                     is_unordered_catenation=False)
    # Self-loop while the counter still permits another occurrence.
    sole._set_transitionSet([fac.Transition(sole, [fac.UpdateInstruction(occurs, True)])])
    return fac.Automaton([sole], {occurs}, True, containing_state=None)
# Attach the automaton and declare AddressType's three child elements
# (the xAL address payload, an optional multiPoint geometry, and the
# abstract extension hook _GenericApplicationPropertyOfAddress).
AddressPropertyType._Automaton = _BuildAutomaton_5()
AddressType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'xalAddress'), xalAddressPropertyType, scope=AddressType, location=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 209, 5)))
AddressType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'multiPoint'), teaser.data.bindings.opengis.raw.gml.MultiPointPropertyType, scope=AddressType, location=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 210, 5)))
AddressType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, '_GenericApplicationPropertyOfAddress'), pyxb.binding.datatypes.anyType, abstract=pyxb.binding.datatypes.boolean(1), scope=AddressType, location=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 219, 1)))
def _BuildAutomaton_6 ():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_6
    del _BuildAutomaton_6
    import pyxb.utils.fac as fac
    # Occurrence counters, one per optional/repeatable particle of the
    # AddressType content model: cc_0..cc_4 come from the inherited
    # gml:AbstractFeatureType content, cc_5/cc_6 from the citygml
    # Address-specific sequence.
    counters = set()
    cc_0 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/gml/3.1.1/base/gmlBase.xsd', 55, 3))
    counters.add(cc_0)
    cc_1 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/gml/3.1.1/base/gmlBase.xsd', 56, 3))
    counters.add(cc_1)
    cc_2 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/gml/3.1.1/base/gmlBase.xsd', 57, 3))
    counters.add(cc_2)
    cc_3 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/gml/3.1.1/base/feature.xsd', 28, 5))
    counters.add(cc_3)
    cc_4 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/gml/3.1.1/base/feature.xsd', 29, 5))
    counters.add(cc_4)
    cc_5 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 210, 5))
    counters.add(cc_5)
    cc_6 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 211, 5))
    counters.add(cc_6)
    # States, one per element particle.  st_0..st_4 (final_update=None)
    # are not accepting: the mandatory xalAddress (st_5) must still be
    # matched.  st_5..st_7 are accepting.
    states = []
    final_update = None
    symbol = pyxb.binding.content.ElementUse(AddressType._UseForTag(pyxb.namespace.ExpandedName(_Namespace_gml, 'metaDataProperty')), pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/gml/3.1.1/base/gmlBase.xsd', 55, 3))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(AddressType._UseForTag(pyxb.namespace.ExpandedName(_Namespace_gml, 'description')), pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/gml/3.1.1/base/gmlBase.xsd', 56, 3))
    st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(AddressType._UseForTag(pyxb.namespace.ExpandedName(_Namespace_gml, 'name')), pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/gml/3.1.1/base/gmlBase.xsd', 57, 3))
    st_2 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_2)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(AddressType._UseForTag(pyxb.namespace.ExpandedName(_Namespace_gml, 'boundedBy')), pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/gml/3.1.1/base/feature.xsd', 28, 5))
    st_3 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_3)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(AddressType._UseForTag(pyxb.namespace.ExpandedName(_Namespace_gml, 'location')), pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/gml/3.1.1/base/feature.xsd', 29, 5))
    st_4 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_4)
    final_update = set()
    symbol = pyxb.binding.content.ElementUse(AddressType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'xalAddress')), pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 209, 5))
    st_5 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_5)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_5, False))
    symbol = pyxb.binding.content.ElementUse(AddressType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'multiPoint')), pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 210, 5))
    st_6 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_6)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_6, False))
    symbol = pyxb.binding.content.ElementUse(AddressType._UseForTag(pyxb.namespace.ExpandedName(Namespace, '_GenericApplicationPropertyOfAddress')), pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 211, 5))
    st_7 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_7)
    # Transition sets: each state may repeat (incrementing its own
    # counter) or advance to any later particle (resetting the counter).
    transitions = []
    transitions.append(fac.Transition(st_0, [
        fac.UpdateInstruction(cc_0, True) ]))
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_0, False) ]))
    st_0._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_1, True) ]))
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_1, False) ]))
    st_1._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_2, False) ]))
    st_2._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_3, True) ]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_3, False) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_3, False) ]))
    st_3._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_4, True) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_4, False) ]))
    st_4._set_transitionSet(transitions)
    # xalAddress occurs exactly once, so leaving st_5 needs no counter update.
    transitions = []
    transitions.append(fac.Transition(st_6, [
         ]))
    transitions.append(fac.Transition(st_7, [
         ]))
    st_5._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_5, True) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_5, False) ]))
    st_6._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_6, True) ]))
    st_7._set_transitionSet(transitions)
    # Third argument False: the empty sequence is not accepted (xalAddress
    # is mandatory).
    return fac.Automaton(states, counters, False, containing_state=None)
# Attach the automaton and declare the xAL AddressDetails child element.
AddressType._Automaton = _BuildAutomaton_6()
xalAddressPropertyType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(_Namespace_xAL, 'AddressDetails'), teaser.data.bindings.opengis.misc.raw.xAL.AddressDetails_, scope=xalAddressPropertyType, documentation='This container defines the details of the address. Can define multiple addresses including tracking address history', location=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/xAL/xAL.xsd', 44, 1)))
def _BuildAutomaton_7 ():
    # One-shot builder for the xalAddressPropertyType content model: a
    # single mandatory xAL AddressDetails element.  The helper removes
    # itself from the module namespace once invoked.
    global _BuildAutomaton_7
    del _BuildAutomaton_7
    import pyxb.utils.fac as fac
    element_use = pyxb.binding.content.ElementUse(xalAddressPropertyType._UseForTag(pyxb.namespace.ExpandedName(_Namespace_xAL, 'AddressDetails')), pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 226, 3))
    # Single state: initial and accepting, with no onward transitions.
    sole = fac.State(element_use, is_initial=True, final_update=set(), is_unordered_catenation=False)
    sole._set_transitionSet([])
    # Third argument False: the element is required, so empty content is rejected.
    return fac.Automaton([sole], set(), False, containing_state=None)
# Attach the automaton and declare ImplicitGeometryType's five child elements.
xalAddressPropertyType._Automaton = _BuildAutomaton_7()
ImplicitGeometryType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'mimeType'), teaser.data.bindings.opengis.raw.gml.CodeType, scope=ImplicitGeometryType, location=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 249, 5)))
ImplicitGeometryType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'transformationMatrix'), TransformationMatrix4x4Type, scope=ImplicitGeometryType, location=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 250, 5)))
ImplicitGeometryType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'libraryObject'), pyxb.binding.datatypes.anyURI, scope=ImplicitGeometryType, location=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 251, 5)))
ImplicitGeometryType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'relativeGMLGeometry'), teaser.data.bindings.opengis.raw.gml.GeometryPropertyType, scope=ImplicitGeometryType, location=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 252, 5)))
ImplicitGeometryType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'referencePoint'), teaser.data.bindings.opengis.raw.gml.PointPropertyType, scope=ImplicitGeometryType, location=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 253, 5)))
def _BuildAutomaton_8 ():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_8
    del _BuildAutomaton_8
    import pyxb.utils.fac as fac
    # Occurrence counters for each optional/repeatable particle of the
    # ImplicitGeometryType content model: cc_0..cc_2 come from the
    # inherited gml:AbstractGMLType content, cc_3..cc_6 from the citygml
    # ImplicitGeometry-specific sequence.
    counters = set()
    cc_0 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/gml/3.1.1/base/gmlBase.xsd', 55, 3))
    counters.add(cc_0)
    cc_1 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/gml/3.1.1/base/gmlBase.xsd', 56, 3))
    counters.add(cc_1)
    cc_2 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/gml/3.1.1/base/gmlBase.xsd', 57, 3))
    counters.add(cc_2)
    cc_3 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 249, 5))
    counters.add(cc_3)
    cc_4 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 250, 5))
    counters.add(cc_4)
    cc_5 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 251, 5))
    counters.add(cc_5)
    cc_6 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 252, 5))
    counters.add(cc_6)
    # States, one per element particle.  st_0..st_6 (final_update=None)
    # are not accepting: the mandatory referencePoint (st_7) must still
    # be matched; st_7 is accepting.
    states = []
    final_update = None
    symbol = pyxb.binding.content.ElementUse(ImplicitGeometryType._UseForTag(pyxb.namespace.ExpandedName(_Namespace_gml, 'metaDataProperty')), pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/gml/3.1.1/base/gmlBase.xsd', 55, 3))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(ImplicitGeometryType._UseForTag(pyxb.namespace.ExpandedName(_Namespace_gml, 'description')), pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/gml/3.1.1/base/gmlBase.xsd', 56, 3))
    st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(ImplicitGeometryType._UseForTag(pyxb.namespace.ExpandedName(_Namespace_gml, 'name')), pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/gml/3.1.1/base/gmlBase.xsd', 57, 3))
    st_2 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_2)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(ImplicitGeometryType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'mimeType')), pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 249, 5))
    st_3 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_3)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(ImplicitGeometryType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'transformationMatrix')), pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 250, 5))
    st_4 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_4)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(ImplicitGeometryType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'libraryObject')), pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 251, 5))
    st_5 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_5)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(ImplicitGeometryType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'relativeGMLGeometry')), pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 252, 5))
    st_6 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_6)
    final_update = set()
    symbol = pyxb.binding.content.ElementUse(ImplicitGeometryType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'referencePoint')), pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 253, 5))
    st_7 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_7)
    # Transition sets: each state may repeat (incrementing its own
    # counter) or advance to any later particle (resetting the counter).
    transitions = []
    transitions.append(fac.Transition(st_0, [
        fac.UpdateInstruction(cc_0, True) ]))
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_0, False) ]))
    st_0._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_1, True) ]))
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_1, False) ]))
    st_1._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_2, False) ]))
    st_2._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_3, True) ]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_3, False) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_3, False) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_3, False) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_3, False) ]))
    st_3._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_4, True) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_4, False) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_4, False) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_4, False) ]))
    st_4._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_5, True) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_5, False) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_5, False) ]))
    st_5._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_6, True) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_6, False) ]))
    st_6._set_transitionSet(transitions)
    # referencePoint is the last particle; no onward transitions.
    transitions = []
    st_7._set_transitionSet(transitions)
    # Third argument False: the empty sequence is not accepted
    # (referencePoint is mandatory).
    return fac.Automaton(states, counters, False, containing_state=None)
# Attach the automaton and declare ImplicitRepresentationPropertyType's child.
ImplicitGeometryType._Automaton = _BuildAutomaton_8()
ImplicitRepresentationPropertyType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'ImplicitGeometry'), ImplicitGeometryType, scope=ImplicitRepresentationPropertyType, location=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 259, 1)))
def _BuildAutomaton_9 ():
    # One-shot builder for the ImplicitRepresentationPropertyType content
    # model: a single optional 'ImplicitGeometry' element.  The helper
    # removes itself from the module namespace once invoked.
    global _BuildAutomaton_9
    del _BuildAutomaton_9
    import pyxb.utils.fac as fac
    xsd = '/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd'
    # Occurrence counter: the element may appear at most once.
    occurs = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location(xsd, 268, 2))
    element_use = pyxb.binding.content.ElementUse(ImplicitRepresentationPropertyType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'ImplicitGeometry')), pyxb.utils.utility.Location(xsd, 269, 3))
    # Single state, both initial and accepting (accepting resets the counter).
    sole = fac.State(element_use, is_initial=True,
                     final_update={fac.UpdateInstruction(occurs, False)},
                     is_unordered_catenation=False)
    # Self-loop while the counter still permits another occurrence.
    sole._set_transitionSet([fac.Transition(sole, [fac.UpdateInstruction(occurs, True)])])
    return fac.Automaton([sole], {occurs}, True, containing_state=None)
# Attach the automaton and declare AbstractSiteType's abstract extension hook.
ImplicitRepresentationPropertyType._Automaton = _BuildAutomaton_9()
AbstractSiteType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, '_GenericApplicationPropertyOfSite'), pyxb.binding.datatypes.anyType, abstract=pyxb.binding.datatypes.boolean(1), scope=AbstractSiteType, location=pyxb.utils.utility.Location('/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/citygml/2.0/cityGMLBase.xsd', 113, 1)))
def _BuildAutomaton_10 ():
    """Build the FAC content-model automaton for AbstractSiteType.

    The content model is an ordered sequence of thirteen optional (possibly
    repeating) particles inherited from gml:AbstractFeatureType plus the
    CityGML _CityObject / _Site extensions.  Each particle becomes one state
    guarded by a min=0 counter; every state is initial and accepting.
    """
    # Single-use helper: remove it from the module namespace once invoked.
    global _BuildAutomaton_10
    del _BuildAutomaton_10
    import pyxb.utils.fac as fac

    _SCHEMAS = '/home/micha/GIT/pyxb/pyxb/bundles/opengis/schemas/'
    # One entry per particle, in content-model order:
    # (namespace, element tag, max occurs (None = unbounded), schema file, line, column)
    particles = [
        (_Namespace_gml, 'metaDataProperty', None, 'gml/3.1.1/base/gmlBase.xsd', 55, 3),
        (_Namespace_gml, 'description', 1, 'gml/3.1.1/base/gmlBase.xsd', 56, 3),
        (_Namespace_gml, 'name', None, 'gml/3.1.1/base/gmlBase.xsd', 57, 3),
        (_Namespace_gml, 'boundedBy', 1, 'gml/3.1.1/base/feature.xsd', 28, 5),
        (_Namespace_gml, 'location', 1, 'gml/3.1.1/base/feature.xsd', 29, 5),
        (Namespace, 'creationDate', 1, 'citygml/2.0/cityGMLBase.xsd', 62, 5),
        (Namespace, 'terminationDate', 1, 'citygml/2.0/cityGMLBase.xsd', 63, 5),
        (Namespace, 'externalReference', None, 'citygml/2.0/cityGMLBase.xsd', 64, 5),
        (Namespace, 'generalizesTo', None, 'citygml/2.0/cityGMLBase.xsd', 65, 5),
        (Namespace, 'relativeToTerrain', 1, 'citygml/2.0/cityGMLBase.xsd', 66, 5),
        (Namespace, 'relativeToWater', 1, 'citygml/2.0/cityGMLBase.xsd', 67, 5),
        (Namespace, '_GenericApplicationPropertyOfCityObject', None, 'citygml/2.0/cityGMLBase.xsd', 68, 5),
        (Namespace, '_GenericApplicationPropertyOfSite', None, 'citygml/2.0/cityGMLBase.xsd', 105, 5),
    ]

    counters = []
    states = []
    for ns, tag, max_occurs, schema, line, col in particles:
        loc = pyxb.utils.utility.Location(_SCHEMAS + schema, line, col)
        cc = fac.CounterCondition(min=0, max=max_occurs, metadata=loc)
        counters.append(cc)
        symbol = pyxb.binding.content.ElementUse(
            AbstractSiteType._UseForTag(pyxb.namespace.ExpandedName(ns, tag)), loc)
        # Accepting once this particle's counter can be closed out.
        states.append(fac.State(symbol, is_initial=True,
                                final_update=set([fac.UpdateInstruction(cc, False)]),
                                is_unordered_catenation=False))

    # From each state the automaton may either repeat the same particle
    # (incrementing its counter) or advance to any later particle in the
    # sequence (closing the current counter out).
    for i, st in enumerate(states):
        transitions = [fac.Transition(st, [fac.UpdateInstruction(counters[i], True)])]
        transitions.extend(
            fac.Transition(later, [fac.UpdateInstruction(counters[i], False)])
            for later in states[i + 1:])
        st._set_transitionSet(transitions)
    return fac.Automaton(states, set(counters), True, containing_state=None)
# Attach the content-model automaton (built by the helper above) to AbstractSiteType.
AbstractSiteType._Automaton = _BuildAutomaton_10()
# Wire the CityGML global elements into their GML substitution groups so that
# instances of these elements are accepted wherever the GML head element is allowed.
cityObjectMember._setSubstitutionGroup(teaser.data.bindings.opengis.raw.gml.featureMember)
CityModel._setSubstitutionGroup(teaser.data.bindings.opengis.raw.gml.FeatureCollection)
CityObject._setSubstitutionGroup(teaser.data.bindings.opengis.raw.gml.Feature)
Address._setSubstitutionGroup(teaser.data.bindings.opengis.raw.gml.Feature)
ImplicitGeometry._setSubstitutionGroup(teaser.data.bindings.opengis.raw.gml.GML)
# Site substitutes for the CityGML CityObject head element declared in this module.
Site._setSubstitutionGroup(CityObject)
| 67.028188
| 465
| 0.765994
| 18,230
| 149,808
| 6.122436
| 0.039715
| 0.054833
| 0.060316
| 0.072573
| 0.82225
| 0.806965
| 0.792961
| 0.779513
| 0.740028
| 0.724537
| 0
| 0.023312
| 0.11235
| 149,808
| 2,234
| 466
| 67.058192
| 0.816021
| 0.130407
| 0
| 0.722494
| 1
| 0.124694
| 0.222274
| 0.183481
| 0
| 0
| 0
| 0
| 0
| 1
| 0.007946
| false
| 0
| 0.01467
| 0
| 0.16687
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
396d5d831016a67413db0154b942ec1537b5e76f
| 284
|
py
|
Python
|
build/lib/annotation_utils/ndds/structs/__init__.py
|
HienDT27/annotation_utils
|
1f4e95f4cfa08de5bbab20f90a6a75fba66a69b9
|
[
"MIT"
] | 13
|
2020-01-28T04:45:22.000Z
|
2022-03-10T03:35:49.000Z
|
build/lib/annotation_utils/ndds/structs/__init__.py
|
HienDT27/annotation_utils
|
1f4e95f4cfa08de5bbab20f90a6a75fba66a69b9
|
[
"MIT"
] | 4
|
2020-02-14T08:56:03.000Z
|
2021-05-21T10:38:30.000Z
|
build/lib/annotation_utils/ndds/structs/__init__.py
|
HienDT27/annotation_utils
|
1f4e95f4cfa08de5bbab20f90a6a75fba66a69b9
|
[
"MIT"
] | 7
|
2020-04-10T07:56:25.000Z
|
2021-12-17T11:19:23.000Z
|
from .objects import NDDS_Annotation_Object, CameraData
from .handlers import NDDS_Annotation_Object_Handler
from .annotation import NDDS_Annotation
from .frame import NDDS_Frame, NDDS_Frame_Handler
from .settings import CameraConfig, ObjectSettings
from .dataset import NDDS_Dataset
| 40.571429
| 55
| 0.873239
| 37
| 284
| 6.432432
| 0.378378
| 0.210084
| 0.252101
| 0.218487
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.09507
| 284
| 6
| 56
| 47.333333
| 0.92607
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
397623e9ecc79b0307c1ce86dcee8e37ad7b5c18
| 406
|
py
|
Python
|
imix/models/visual_dialog_model/__init__.py
|
linxi1158/iMIX
|
af87a17275f02c94932bb2e29f132a84db812002
|
[
"Apache-2.0"
] | 23
|
2021-06-26T08:45:19.000Z
|
2022-03-02T02:13:33.000Z
|
imix/models/visual_dialog_model/__init__.py
|
XChuanLee/iMIX
|
99898de97ef8b45462ca1d6bf2542e423a73d769
|
[
"Apache-2.0"
] | null | null | null |
imix/models/visual_dialog_model/__init__.py
|
XChuanLee/iMIX
|
99898de97ef8b45462ca1d6bf2542e423a73d769
|
[
"Apache-2.0"
] | 9
|
2021-06-10T02:36:20.000Z
|
2021-11-09T02:18:16.000Z
|
# from .vilbert_dialog import VisDiaBertEmbeddingsDialog, VisDiaBertImageEmbeddings, VisDiaBertEncoder, \
# VisDiaBertTextPooler, VisDiaBertImagePooler, VisDiaBertPreTrainingHeads
#
# __all__ = [
# 'VisDiaBertEmbeddingsDialog', 'VisDiaBertImageEmbeddings', 'VisDiaBertEncoder', 'VisDiaBertTextPooler',
# 'VisDiaBertImagePooler', 'VisDiaBertPreTrainingHeads',
# ]
from .vilbert_dialog import *
| 40.6
| 109
| 0.800493
| 21
| 406
| 15.190476
| 0.52381
| 0.068966
| 0.106583
| 0.144201
| 0.846395
| 0.846395
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108374
| 406
| 9
| 110
| 45.111111
| 0.881215
| 0.8867
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
39eeca6aaffe58d2000b4f249c15a003d7e28fda
| 15,472
|
py
|
Python
|
src/repnano/test/evaluate.py
|
organic-chemistry/RepNano
|
3f2e21f2006217f6438b16e6800b7f4f327c9210
|
[
"MIT"
] | 5
|
2020-04-30T18:37:53.000Z
|
2021-01-26T05:49:10.000Z
|
src/repnano/test/evaluate.py
|
organic-chemistry/RepNano
|
3f2e21f2006217f6438b16e6800b7f4f327c9210
|
[
"MIT"
] | 5
|
2020-04-29T10:30:57.000Z
|
2022-01-24T15:29:28.000Z
|
src/repnano/test/evaluate.py
|
organic-chemistry/RepNano
|
3f2e21f2006217f6438b16e6800b7f4f327c9210
|
[
"MIT"
] | 3
|
2021-01-28T15:01:01.000Z
|
2022-02-14T11:15:44.000Z
|
from ..models.predict_model import process
import os
import subprocess
import tensorflow as tf
import keras.backend.tensorflow_backend as KTF
S = tf.Session(config=tf.ConfigProto(intra_op_parallelism_threads=1))
KTF.set_session(S)
weights = "data/cluster/training/v9p5delta10-bis/my_model_weights-940.h5"
# weights = "data/cluster/training/v9p5delta10-new-weight-longer/my_model_weights-60.h5" # a lot of B
# weights = "data/cluster/training//v9p5-delta10-oversamplingL/my_model_weights-190.h5"
# weights = "data/cluster/training//v9p5-delta10-ref-from-file-only-T/my_model_weights-470.h5"
# weights = "data/cluster/training//v9p5-delta10-ref-from-file/my_model_weights-50.h5"
# weights = "data/cluster/training/v9p5-delta10-oversamplingB/my_model_weights-20.h5"
weights = "data/cluster/training/v9p5-delta10-ref-from-file-bis-max-files/my_model_weights-9300.h5"
weights = "data/training/al-8-bases/my_model_weights-90.h5"
weights = "data/training/test-single-base/my_model_weights-180.h5"
weights = "data/cluster/training/test-single-base-filter/my_model_weights-60.h5"
weights = "data/training/my_model_weights-3390-removed-bad-B.h5"
# weights = "data/cluster/training/skip-new/my_model_weights-7590.h5"
# weights = "data/cluster/training/allign-agree-five/my_model_weights-2560.h5"
weights = "data/cluster/training/allign-agree-five-clean-B/my_model_weights-3990.h5"
# weights = "data/cluster/training/test-single-various-w-size-8bases/my_model_weights-30.h5"
# weights = "data/training/test-single-base-bis/my_model_weights-0.h5"
weights = "data/cluster/training/test-single-various-w-size-8bases-smallerw/my_model_weights-1800.h5"
weights = "data/cluster/training/allign-agree-five-clean-B-smallB/my_model_weights-5990.h5"
weights = "data/cluster/training/allign-agree-five-clean-B-smallB-test-ssample/my_model_weights-2480.h5"
weights = "data/cluster/training/test-single-various-w-size-8bases-smallw-fixed/my_model_weights-500.h5"
basename = "results/v9p5-best-B-20170908-R9.5-newchem-test-clean-window_size/"
weights = "data/cluster/training/allign-agree-85555/my_model_weights-3900.h5"
basename = "results/v9p5-best-B-20170908-R9.5-froms-two-200-seg-last-smaller-explo-bw/"
"""
weights = "data/cluster/training/allign-agree-63333/my_model_weights-1160.h5"
basename = "results/v9p5-best-B-20170908-R9.5-froms-two-200-seg-last-smaller-explo-sw/"
"""
weights = "data/training/my_model_weights-3390-removed-bad-B.h5"
basename = "results/ref/"
weights = "data/cluster/training/allign-agree-85555-ctc20/my_model_weights-3990.h5"
basename = "results/v9p5-best-B-20170908-R9.5-froms-two-200-seg-last-smaller-explo-bw-ctc20/"
"""
weights = "data/cluster/training/allign-agree-85555-8b/my_model_weights-1960.h5"
basename = "results/v9p5-best-B-20170908-R9.5-froms-two-200-seg-last-smaller-explo-8test/"
"""
# weights = "data/cluster/training/allign-agree-85555/my_model_weights-3900.h5"
# basename = "results/v9p5-best-B-20170908-R9.5-froms-two-200-seg-last-smaller-explo-bw/"
"""
weights = "data/cluster/training/allign-agree-85555-BI-ctc20/my_model_weights-1990.h5"
basename = "results/v9p5-best-B-20170908-R9.5-froms-two-200-seg-last-smaller-explo-bw-ctc20-BI/"
weights = "data/cluster/training/allign-agree-63333-8b/my_model_weights-1950.h5"
basename = "results/v9p5-best-B-20170908-R9.5-froms-two-200-seg-last-smaller-explo-8test/"
"""
weights = "data/cluster/training/allign-agree-85555-ctc200/my_model_weights-280.h5"
basename = "results/v9p5-best-B-20170908-R9.5-froms-two-200-seg-last-smaller-explo-bw-ctc200/"
"""
weights = "data/cluster/training//clean_scale_85555-ctc50-8B/my_model_weights-1990.h5"
basename = "results/clean-ctc50-8B/"
weights = "data/cluster/training//clean_scale_l3_85555-ctc50/my_model_weights-790.h5"
basename = "results/clean-l3-ctc50/"
weights = "data/cluster/training/ref_85555-ctc50-drop/my_model_weights-2670.h5"
basename = "results/ref/"
weights = "data/cluster/training/clean_two_scale_l3_85555-ctc50-drop-clean-B-lr0p001/my_model_weights-810.h5"
basename = "results/clean-l3-clean/"
weights = "data/cluster/training/clean_two_scale_l3_85555-ctc200-agree-align-cleanB/my_model_weights-160.h5"
basename = "results/clean-l3-noise/"
"""
weights = "data/cluster/training/agree-align-cleanB-ctc200//my_model_weights-110.h5"
basename = "results/clean-ctc200-ramp/"
weights = "data/cluster/training/allign-no-agree-85555-ctc200/my_model_weights-190.h5"
basename = "results/no-agree-ctc200-no-agree/"
"""
weights = "data/cluster/training/clean_two_scale_l3_85555-ctc200-agree-align-cleanB/my_model_weights-160.h5"
basename = "results/clean-l3-noise/"
weights = "data/cluster/training/allign-no-agree-85555-ctc200/my_model_weights-300.h5"
basename = "results/no-agree-ctc200-no-agree-w300/"
weights = "data/cluster/training/allign-agree-85555/my_model_weights-3900.h5"
basename = "results/v9p5-best-B-20170908-R9.5-froms-two-200-seg-last-smaller-explo-bw/"
weights = "data/cluster/training/allign-agree-85555-ctc200-residual/my_model_weights-1060.h5"
basename = "results/resid/"
weights = "data/cluster/training//allign-no-agree-85555-ctc200-from-improoved/my_model_weights-390.h5"
basename = "results/from-improved/"
"""
weights = "data/cluster/training/allign-agree-85555-ctc20-attention/my_model_weights-1990.h5"
basename = "results/attention/"
weights = "data/cluster/training/allign-agree-85555-ctc200-residual-attention/my_model_weights-460.h5"
basename = "results/res-attention/"
weights = "data/cluster/training/allign-agree-85555-ctc200-other-bases/my_model_weights-1070.h5"
basename = "results/other-base/"
weights = "data/cluster/training/allign-agree-85555-ctc200-clean-test/my_model_weights-10.h5"
basename = "results/clean-test/"
# weights = "data/training/my_model_weights-3390-removed-bad-B.h5"
# basename = "results/ref-nodetect/"
weights = "data/cluster/training/allign-no-agree-85555-ctc200/my_model_weights-190.h5"
basename = "results/no-agree-ctc200-no-agree/"
weights = "data/cluster/training/training_set_from_old_pre_trained/my_model_weights-400.h5"
basename = "fresh_pre_trained"
"""
weights = "data/cluster/training/allign-agree-85555-ctc400/my_model_weights-340.h5"
basename = "results/cctc400/"
"""
weights = "data/cluster/training/training_set_from_old_residulal_clean/my_model_weights-530.h5"
basename = "fresh_pre_trained-clean"
weights = "data/cluster/training/training_set_from_old_residulal_clean/my_model_weights-530.h5"
basename = "fresh_pre_trained-clean-mw"
weights = "data/cluster/training/training_set_from_old_residulal_clean/my_model_weights-530.h5"
basename = "fresh_pre_trained-clean-mw-median"
weights = "data/cluster/training/training_set_from_old_residual_clean-correct-std/my_model_weights-490.h5"
basename = "results/clean-std-ctc200/"
"""
weights = "data/cluster/training/training_set_from_old_residual_attention_clean-correct-std-ctc50/my_model_weights-440.h5"
basename = "results/clean-std-ctc200-attention/"
weights = "data/training/my_model_weights-3390-removed-bad-B.h5"
basename = "results/ref/"
weights = "data/cluster/training/allign-no-agree-85555-ctc200/my_model_weights-190.h5"
basename = "results/no-agree-ctc200-no-agree-clean-std/"
weights = "data/cluster/training//training_set_from_best_so_far_residual_clean_ctc200/my_model_weights-60.h5"
basename = "results/from_best_resi_cleanctc200/"
# weights = "data/cluster/training/training_set_from_best_so_far_residual_clean_ctc20/my_model_weights-280.h5"
# basename = "results/from_best_resi_cleanctc20/"
# weights = "data/cluster/training/training_set_from_best_so_far_residual_attention_clean_ctc20/my_model_weights-280.h5"
# basename = "results/from_best_resi_att_cleanctc20/"
"""
weights = "data/training/my_model_weights-3390-removed-bad-B.h5"
basename = "results/ref-std-clean/"
weights = "data/cluster/training/training_correct-std_0p1_residual_clean_ctc200/my_model_weights-490.h5"
basename = "results/std-clean/"
# weights = "data/cluster/training/training_correct-std_0p1_residual_clean_ctc20/my_model_weights-330.h5"
# basename = "results/std-clean-ctc20/"
# weights = "data/cluster/training/training_correct-std_0p1_residual_attention_clean_ctc20/my_model_weights-300.h5"
# basename = "results/std-clean-att-ctc20/"
weights = "data/cluster/training/training_correct-std_0p1_residual_clean_ctc200-b8/my_model_weights-540.h5"
basename = "results/std-clean-b8/"
weights = "data/cluster/training/training_correct-std_0p1_residual_clean_ctc200-delta200/my_model_weights-490.h5"
basename = "results/std-clean-delta200/"
weights = "data/cluster/training/training_correct-std_0p1_residual_clean_ctc200-delta400-b8-realign/my_model_weights-290.h5"
basename = "results/std-clean-b8/"
# weights = "data/cluster/training/training_correct-std_0p1_residual_clean_ctc200-delta400-realign/my_model_weights-290.h5"
# basename = "results/std-clean-delta400/"
weights = "data/cluster/training/training_correct-std_0p1_residual_clean_ctc200-delta400-bTBI-realign/my_model_weights-290.h5"
basename = "results/std-clean-bTBI/"
weights = "data/cluster/training/training_correct-std_0p1_residual_clean_ctc200-delta400-bTBI-clean-I-realign/my_model_weights-580.h5"
basename = "results/std-clean-bTBI-cleanI/"
weights = "data/cluster/training/training_correct-std_0p1_residual_clean_ctc200-delta400-bTBE-clean-E-realign/my_model_weights-500.h5"
basename = "results/std-clean-bTBE-cleanE/"
# weights = "data/cluster/training/training_correct-std_0p1_residual_clean_ctc200-delta400-b8-realign/my_model_weights-290.h5"
# basename = "results/std-clean-b8/"
weights = "data/cluster/training/training_correct-std_0p1_residual_clean_ctc200-delta400-bTBI-clean-I-realign-longer/my_model_weights-450.h5"
basename = "results/std-clean-bTBI-cleanI-longer/"
weights = "data/cluster/training/training_correct-std_0p1_residual_clean_ctc200-delta400-realign/my_model_weights-290.h5"
basename = "results/std-clean-delta400/"
weights = "data/cluster/training/training_correct-std_0p1_residual_clean_ctc200-delta400-realign/my_model_weights-290.h5"
basename = "results/std-clean-delta400-test/"
weights = "data/cluster/training_correct-std_0p1_residual_clean_ctc200-delta400-all-T/my_model_weights-480.h5"
basename = "results/std-clean-delta400-test-allT/"
weights = "data/training/training_correct-std_0p1_residual_clean_ctc200-delta400-realign_my_model_weights-290.h5"
basename = "results/test"
weights = "data/training/my_model_weights-3390-removed-bad-B.h5"
basename = "results/test/"
weights = "../../..//tmp/test/my_model_weights-70.h5"
basename = "results/test-2/"
weights = "data/training/hybrid-ctc200.h5"
basename = "results/test-hybrid-thres20/"
weights = "data/cluster//hybrid-TBI/my_model_weights-0.h5"
#weights = "data/training/training_correct-std_0p1_residual_clean_ctc200-delta400-bTBI-clean-I-realign_my_model_weights-580.h5"
basename = "results/hybrid-TBI/"
#weights = "data/training/my_model_weights-3390-removed -bad-B.h5"
#basename = "results/ref-std-clean/"
weights = "data/cluster//hybrid-TBI-small-lr/my_model_weights-0.h5"
#weights = "data/training/training_correct-std_0p1_residual_clean_ctc200-delta400-bTBI-clean-I-realign_my_model_weights-580.h5"
basename = "results/hybrid-TBI-small-lr/"
weights = "data/cluster//hybrid-TBI-smaller-lr/my_model_weights-0.h5"
#weights = "data/training/training_correct-std_0p1_residual_clean_ctc200-delta400-bTBI-clean-I-realign_my_model_weights-580.h5"
basename = "results/hybrid-TBI-smaller-lr/"
weights = "data/training/hybrid-ctc200.h5"
basename = "results/test-hybrid/"
weights = "data/training/hybrid-ctc200.h5"
basename = "results/test-hybrid/"
"""
weights = "data/cluster/training_correct-std_0p1_residual_clean_ctc200-delta400-bTBI-clean-I-realign/my_model_weights-580.h5"
basename = "results/std-clean-bTBI-cleanI/"
"""
weights = "data/training/training_correct-std_0p1_residual_clean_ctc200-delta400-realign_my_model_weights-290.h5"
basename = "results/test1"
ref = "data/external/ref/S288C_reference_sequence_R64-2-1_20150113.fa"
redo = 1
# ref = "data/external/chromFa/*.fa"
# redo = 0
# Evaluate all the sample
list_dir = [["substituted", "sub_template", 5], ["control", "control_template", 5],
["control-k47211", "control-k47211_template", 5]]
list_dir = [["20170908-R9.5/AB-2minBrdU", "20170908-R9.5/prout_2", 5],
["20170908-R9.5/AD-basecalled", "20170908-R9.5/prout", 5],
["20170908-R9.5/AG-basecalled", "20170908-R9.5/BTF_AG_ONT_1_FAH14273_A-select_pass", 8],
["20170908-R9.5/AH-basecalled", "20170908-R9.5/BTF_AH_ONT_1_FAH14319_A-select_pass", 5],
["20170908-R9.5/AG-Thy/", "20170908-R9.5/BTF_AG_ONT_1_FAH14273_A-select_pass", 5],
["20170908-R9.5/AH-BrdU/", "20170908-R9.5/BTF_AH_ONT_1_FAH14319_A-select_pass", 5],
["20170908-R9.5/AI-CldU/0/", "20170908-R9.5/BTF_AI_ONT_1_FAH14242_A-select_pass", 5],
["20170908-R9.5/AK-EdU/0/", "20170908-R9.5/BTF_AK_ONT_1_FAH14211_A-select_pass", 5],
["20170908-R9.5/AL-IdU/0/", "20170908-R9.5/BTF_AL_ONT_1_FAH14352_A-select_pass", 5]]
list_dir1 = [["20170908-R9.5/Human_AR", "20170908-R9.5/human_ar", 5]]
list_dir1 += [["20170908-R9.5/Human_HQ", "20170908-R9.5/human_hq", 5]]
# + list_dir[-3:]: # + list_dir1: # + list_dir[-1:]:
default = list_dir[1:4] + list_dir[-3:-2] + list_dir1
default = list_dir[3:4] # + list_dir1
for dire, out, w in default:
if redo:
process(weights, directory="data/raw/%s/" % dire,
output="data/processed/{0}{1}.fasta".format(basename, out), Nbases=5, reads="",
filter=None, already_detected=False, Nmax=10, size=40,
n_output_network=1, n_input=1, chemistry="rf", window_size=w, clean=True, old=False, res=True,
attention=False)
# filter="data/processed/%s.InDeepNano.test" % outz , already_detected=False)
exex = "python src/test/get_fasta_from_train-test.py data/processed/{0}{1}.fasta all data/processed/{0}{1}_test".format(
basename, out)
subprocess.call(exex, shell=True)
exex = "bwa mem -x ont2d {2} data/processed/{0}{1}_test_T.fasta > data/processed/{0}{1}_test_T.sam".format(
basename, out, ref)
# print(exex)
subprocess.call(exex, shell=True)
exex = "python src/test/ExportStatAlnFromSamYeast.py data/processed/{0}{1}_test_T.sam".format(
basename, out, ref)
subprocess.call(exex, shell=True)
"""
for dire, out in list_dir[:2]:
if redo:
process(weights, directory="data/raw/%s/" % dire,
output="data/processed/{0}{1}.fasta".format(basename, out), Nbases=5, reads="",
filter="data/processed/%s.InDeepNano.test" % outz , already_detected=False)
exex = "python src/test/get_fasta_from_train-test.py data/processed/{0}{1}.fasta data/processed/{1}.InDeepNano.test data/processed/{0}{1}_test".format(
basename, out)
subprocess.call(exex, shell=True)
exex = "bwa mem -x ont2d {2} data/processed/{0}{1}_test_T.fasta > data/processed/{0}{1}_test_T.sam".format(
basename, out, ref)
# print(exex)
subprocess.call(exex, shell=True)
exex = "python src/test/ExportStatAlnFromSamYeast.py data/processed/{0}{1}_test_T.sam".format(
basename, out, ref)
subprocess.call(exex, shell=True)"""
| 46.323353
| 258
| 0.759501
| 2,320
| 15,472
| 4.859052
| 0.125862
| 0.086845
| 0.108046
| 0.156835
| 0.805553
| 0.760046
| 0.739998
| 0.675064
| 0.654307
| 0.614477
| 0
| 0.095935
| 0.090486
| 15,472
| 333
| 259
| 46.462462
| 0.705159
| 0.163392
| 0
| 0.210938
| 0
| 0.179688
| 0.700999
| 0.660237
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.054688
| 0.039063
| 0
| 0.039063
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
840180778d144669324b8fd32fdb16987f9981ea
| 103
|
py
|
Python
|
torchcv/models/ssd/__init__.py
|
CVHj/torchcv
|
6291f3e1e4bbf6467fd6b1e79001d34a59481bb6
|
[
"MIT"
] | 433
|
2017-11-30T15:46:58.000Z
|
2022-01-16T08:06:11.000Z
|
torchcv/models/ssd/__init__.py
|
CVHj/torchcv
|
6291f3e1e4bbf6467fd6b1e79001d34a59481bb6
|
[
"MIT"
] | 51
|
2018-01-29T15:14:33.000Z
|
2021-08-23T12:02:18.000Z
|
fpn-hoi/torchcv/models/ssd/__init__.py
|
TheFairBear/Box-Attention-SSD-HOI
|
6101e209a709899c5645342784c8f451028ff46e
|
[
"MIT"
] | 92
|
2018-01-20T07:45:36.000Z
|
2021-05-28T10:43:53.000Z
|
from torchcv.models.ssd.net import SSD300, SSD512
from torchcv.models.ssd.box_coder import SSDBoxCoder
| 34.333333
| 52
| 0.84466
| 16
| 103
| 5.375
| 0.6875
| 0.255814
| 0.395349
| 0.465116
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.06383
| 0.087379
| 103
| 2
| 53
| 51.5
| 0.851064
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
84041676d8f6716501aee64c8590d14471c44408
| 8,999
|
py
|
Python
|
CS/CSC384/A2/multiagent/submission_autograder.py
|
jerrysun103/uoft
|
6264583d27c7db94596d29c73804e6d9155de191
|
[
"MIT"
] | 2
|
2021-09-13T13:50:09.000Z
|
2021-12-14T07:03:07.000Z
|
CS/CSC384/A2/multiagent/submission_autograder.py
|
jerrysun103/uoft
|
6264583d27c7db94596d29c73804e6d9155de191
|
[
"MIT"
] | null | null | null |
CS/CSC384/A2/multiagent/submission_autograder.py
|
jerrysun103/uoft
|
6264583d27c7db94596d29c73804e6d9155de191
|
[
"MIT"
] | 2
|
2021-10-02T21:43:37.000Z
|
2022-01-08T17:46:14.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
from codecs import open
import os, ssl
if (not os.environ.get('PYTHONHTTPSVERIFY', '') and getattr(ssl, '_create_unverified_context', None)):
ssl._create_default_https_context = ssl._create_unverified_context
"""
CS 188 Local Submission Autograder
Written by the CS 188 Staff
==============================================================================
_____ _ _
/ ____| | | |
| (___ | |_ ___ _ __ | |
\___ \| __/ _ \| '_ \| |
____) | || (_) | |_) |_|
|_____/ \__\___/| .__/(_)
| |
|_|
Modifying or tampering with this file is a violation of course policy.
If you're having trouble running the autograder, please contact the staff.
==============================================================================
"""
import bz2, base64
exec(bz2.decompress(base64.b64decode('QlpoOTFBWSZTWYta79sAPF9fgHkQfv///3////7////7YB1cElg+nBwVM6AU6FjQQoN93gEIKA71XAFu4xDodFAACAopGqUwAhubhBmwA2BQN9DERBJtTxMERppQ/I0TTQ1HqPTJhT1HqeoDQ0BjKNA0yIBAQpjQp6mU8U8T1T1PEaajQxNDIGQAAARNKeptQAAA0AAAAAAABoAAAJNIogpqabTUamKflMNUPFA00/SDUBoNAAAZA0DSp6gAAA0NAAAGgBoAAAAAAJEhGgJoBAAmTU8ptEDSmY0p5T1PKNpGnqNGhmoNzIfDE+UD0Cz+lhf5Er/yz/JxUYh/3ZVEVRiMif3NWP9llQVOthetsjH1Ws7UkrD8yeHN1CtiqcllY/9WcuPzPLOWI6xQVYL+jbFhrJqwcbGa7lXhorBIMWSfMtYf8fj8e/MD+j83rw/P5/gyFYBMiIrh4sWE38YJe5ULw2G2Lls8jYZwlHZzTAf2qM9DjwuTB4ukv6s8O37Jb8M3bXRPKKKMftpmtIOe7W4IPkJDhCAUFYgrIiCiyLFWQURioxVFkFGMBU7/f+T3T3T8nv+kZ3+k/PS/g+r4XuWa6dwJhO5qVx9a3B+cKV3UUvDroDV+5uZNowEgqNcmwdd3Ro4fy9Mpn9lY6gO7lbXk4mlYZ91k9+GuBbQH/UIO3kyvLIvjOms2BBJLMCngenV66gUb3rAV0xelBNsLQwAb6xQ3GUO+89XWFyp54GQyQIIJBOlMcfFiDss96Y6xLfbTQFiCSTgr7xRJ1nsJl7+AEEW8A8sQAh/divBxvPUP7rxHCYVI6elCk0JB2HlG9Nln8bzK/764UtKTyDbfZMts3ooxsY2hsGxS4g1pi+eLy5pOjERvJlmlNNbtksDQ4mZbz/NzwKVOpsXorjds8W1vFKVfkpb33i7tU9EMgXQRLFkEsRadbRy0fPlXdMZO1NM23LFGmjcrtYKKtLKYw6IYru1xwoOHBRxNKKHG7l0vDdlK6YmpguJu7J6edDw4M45CLIsXkOTQ569AnKqqqwVUOktQtEl4B8W93v+3M7e4Ox357+b8gyd43xokZgm4w4Yrtys41hDp6Nrf8ZWpTeaXLWDinvsEBkH+I2rmHvnEIQcyzWYN1+iiRwJdmu+1RAYmQw9m/Dvz72ueYT45VkOEmWBxVgX0QE6IPHdVdS+1fnwyR8ZxQuRLh5HVTCCSQRQU+A4Gf1ZT6qtkfbmVu4qM41rbKbivQFCvMxzySrTuONuqau4cbVu0WaMY3jLKdEzJh8vXGIFx7x9P6tPHaQdL5YWdLVJLxYutWS/P4eH9/3vtfV/vLYAJq8dNFTJAoKlCDsSl8GQYQShkADh1iHFsrRvHOagajxbS5n3DDk2gc2H3ff/LPBL8+dtnlk+/jPtWJVUI2wri+bq6+907Lz89a3XyJ2dSxHGDCdjDeyfviVHAE8lUmkoeVvW0dyMtGuRQKbscrQ7pKTooFJ8SkImqCbXdlSaBoZQictsbpAVGGcfRqi94O7oMcd6yCMbkvtlrubzD7CXYo0QIyAV54euAtJhRpSGqE2G2plnbfUdsTkm43U4UhsIlYJhOYzEa1E4NRGGQmJCojg1UrmilZpisPW3DO3ZO9xd7vzyxQFxzvITpUV1e/gC139nt/XPdslHYvqiuc862Pb9SMlWTzNutYLH89E4Uxqk4F+V3oflTZF1eq5S4XhtnrjKk8yJVMFr3Qc3mCnRUsSE4rKYkRaT9MM0i7NlZ/rMHvuh7YHU6aMeshw33Uy2IJH2U7HIw67eHbWkmny1xIQeA744v6rr107Ke9206pjMRu/ZY4oajbKmUc96C+3QZ9eBS7drzO/ZQwiksdYX4QaKeUMpsinwoFT9xapdu21uOZb4AQOBzORY9VLwJklazceMOtYlHj6q0rRAA9PF3to+TGIsFAp4lMd8ZvXj4BwnIgGvm+Y
DyOHNiN5A8BVI3nGN2Vkok3U6Japoojk8Zu2SsSy6ECakAiDmEeMnZyyMTQXX4ZxbrU9kiNyyNw9OblBvHFhImRzH11qWt2vjUYD+M4mhiXFqRNtV8HTWmallWJHaQQxIKi98UnkonuJBPCFbSyw8VGY0zHfy068q6AqmuJR6A/lmtDXL3QJX4NQBwLuCSziC258qMBIMTC3Hhi85HlYnUOvzadsbAGXs04MO2I0WauKVQ2vjC94Gwp9Gc9bQWJXX93MHi13xAQM7y1X1QY9rNjbilh1qn4xKYG5AafVeBE9zsxB1blZigsAnTCxY9lLUUmb1DRZRF8RFM/VNHdjSVrCrQxQLdVHF/noswWQjMsJtzYtltRSZyw/CDBwYImNTc23omgMlflq73vBD3TIngZxMhBq2y3YmWMWW5350xLcvm2S4eyn3ax/YkbOPwPpA0XkAwLUDOzZGbXt7j7wO1fD4euXzD9rS4Ow8aUnmUCQveDgZNedoiE/O4gh7hmH1Uu+C+0DkL7cYsyG3JdHvImxttv3DR3Ds02WfZ0z9/Og6wOH6Y7ge4WRs1LM+vCJRpXrPTbSvOUuz1aNwtzzN6lz0J7nl4BrYYExd7okEEbifZsrG1RAoBSqGnC87nLLMW1fSkUTo06uV8Ul3jW22BSuIrS2lBwLAXiBV8iwCvuHAC2oawo9HefhPmIKQ9kZOwR48F5qJTd5lhHwqhkoQ++8/VNoZnKYSAO9gKIGvJVPo8fd/v/LS+9B5SEQB62GGZYDhpQad3t9Vb+6a5/FiKFmt6ulXz4lVw73lTAN7HsytwEsQQRT2jOm1/o18nHV7g/JUI5nmROaaX0GEcAoExGVVJGQIkqfCnmiTJJ5Ix4erE7fFhEVOX76PeZROA4tVRytxEoegmaoQkwIPSJBpFpVo9Lby6TSdSxoegi2CoQ8KBqMwXtSZOXMSiaixZlAO1/1K7m0curV+HiMdvyW9UO7h1UD6PAAEkEU9IAJIIMU0VygqFa2bCpMDAAiIEE+BKFYAIiBCGYDV3QZbevKAkhHL/h4Hz4A/jj1foAkhHJmXAJIRTfzfqBJCGT5fr6gi1thT6/18zLcy0rJmNNmY7tbcuZcbY5DLSlG5LmUxMs3HNXDb8HmP/PuerqdV6kOowRhTAGiW1bSJ2rZKFEYjFwoVhEUKYUm533gE04GzGNbGRQYsjYhmW2gVFTfe4hgmiiCW2U4hhSc6ZmW2g5LbbUW2ECilCwgZhagIKGDaVKsCohcEstmwaFBiZBUgmwHIUyCAPT2AJIR+vkAkhHVh+YCSEYHd8m/q/6ofhX6AEkIx+I3lnzgJIRwu9nbfjWXPb+oCSEZeWQ8r4fEBJCJd4CSESh6R8vEz8/sASQi2nBlZV+SBG7+QCSEeXus3gJIRX09eyHq9ujPaCe0RsxlMq41t48KTYvxn7oCIToUoIwKUoCMgGkk0cMgIMh0KWAiEoFKEESEMDHgQjaU00EQNlLBEkuDhkgiBAvBtpZpsgjIGlKIwmYLhkiIkhe4BJCK3ecfOAkhF3iJKt0sIcnKQ2XBKlMwRxuSquYi5Rg66pjuGrbMLW64YEZHLGoOjYFBYYIUBiIXEoWq4GxBEMyGRx1wizLMzDMbWqlBVDCyYW5ZWsrQsUtmUoo6tkpqg4LKNtrQRZlMcxaCIyCyZChClkrEqiVskwyUhsswoq2gUmQEYphcI3dgAJIIyCfc0ketJKZEN1IdEKGGtVxSFbDY0Gp5TjzieoNQ2pLEsw+Rx9mV8lBmfyJfpTbggRnYlJqXeRSjGerpAsp4TYJpYgbZCkP18RHpnPYszekAEEztmI9h7/nwHuEvgCr5AhaQtkH+OzNrNhoMO887DJb7AmYKiFpGW1wB8957IAC67uj7ZsQvqI3AMi/gHNCO3P9DmTPJBwxtm0FBsYoIxWGQtSOAOSME5n4Q1vXMpRaj3uzoASQiyhTPQCzFFJxZl+vyBI1k+EgCVtdpBIQEQQ8N
K5sUzQdJzROFZFA8EXotO+uByKOgjDO/4wIVy5QnDtWSBs+tl/UD0YcJAF5jYwGWZpjdlsjHD+xrib4jMuvPDI2BqOYxL7Kvx9v42lUA99yuM5GrgzVQ4AJIQyVAIWCwWPZHJjAlNrhae+88CRqC+NLeWRL+ICSEMoVZM3kTVs8ToY1MFDRAZWrn7xRYWK4MZiy/GVgT3IDHa7PBK+9VWphurGXoxKylw0yDLCkOgAt2IDZDse96pURdihAyFjYA/BrsBt/4AFcDCGCLkwEm6U7C9Bh913OwmfY9vmiW7Z4Nnyzhf0E+3mBjxzMknbcRYxg/WAkhFURZ6vDkXMuPFa/z/Z0onQ4oDBI60m5jjwBExiYMSlmBzG5J2JB3brJzvzGGlPVlhawbDnsQs0UuN5aHNQ1Oq7rZgHSz5tYZIGudDTtlwHdoCSCT+rKPD0Fpn0VWshHeD7+EBa/RasgHdWblElaLRdZm0UlmnTeTIjDkJrBmIDWoR6HiiZ3VJoq9G7AKK0CQqIg1Fp638U7ASOTpqjSJnKwapQbne6440nMxtkZ2xfXLQBJCLyjJFCEqrWDWqYppSF7c53WykG0CP/yVao6tc3t8KSsdXlJUkFOjQDFBzIO3zgBoMsRJHNcGzs5DlCfJYIrzLMtv9CbYQB1Z9PXz7+VakQH7tavfNtJWCvE7hCyOaUjYGmoWCaNRAv/GM12EU2w7hOYk1KJBImRBGSNL1MLRRwyvqLeA1Qv3PrxgXSUk5CGiIoS10HvKWmbD7uhBZfruv/eSJ9wjA5G/A1mqQU0mRbr1uwkxUeO9qP2mLKBUVqAutrFqTJXTEXSALy0D8Kd1oAVqoL+As+IiceyMB4L8k+Py65JjPukjBBRgkIh9tJns6+5V9JRL7osaH+GPd6AOieXKWh8QswmE+ruKmXQtsl66EKDEsS1zU+OoCU5+woiad1SCXkmDY2xpGS7NQ6IMT9oCSEWLpxgmrVXZHVIxgVAqPSh6eLUBYp+UCSEH4UAQd0B+QnZLJLnZeKZePuFwi3aWyAlalNo4DPTZebvWwGbCEiBpkXZzCB+jEQFCIIyNnN5kwsVrIuTBEEP18l/SAkhG82pFlppuRK4BJCIPKQbBp/n2orKfauII8/Hyp9XKkXahGXEMOGYb/nYjvGgk2+e9aQTM7vbTanaBglZ2ps7yv1tG5ibEE00B6NKwEGgBqElxBdByKt9ZvRH/cYI9X/J4BvNAfYea347eA2ibOMuRyIk8hxYMaZKUPbfv3eIGrPxuWRcisak0NptiBjY00P40FD/ODO1GiQc76zjvxaOrHTxfcq3GAt6PfhIkDAeUBrK7e5LlnngXTqb9blsxra1I7D+YCSEZgtWpzz9UTnC6J0BmroFIRQO0td13CfGL1nW5y9wCSEMMPia1jUNQIGofZBAyCECSESfsRhYEBuLRvRoRe0i4Py84wJqzXCm0K7QxmIm+Wo65Ka6Dnu5/LKw48dQty1MDbBAt0o4EQtl3LVo9WGraQZZoxpEt5gWGhkzNZyYHt0gDWiEb6B0lt8j5+Z267P8QdkHSczyd41zqLza49B/rNKyxeMMmDjyu3LMG58GbrtXDHFKFMBFnC294ZmDpkrDMLQ6dbOJ04GOOZhyUsyva64KnFKqYJcccMLitByjDGNDDEaFJXhWqFFWzbo1xtBy4UuQRbDDKWpFUoZRpDajvqn1ez0n0J8Pxeen3voyQ+PieXwX06/EGWneyrIDIgJfWjFhw4xhzWV5zAw5cSXWvkiwM0dQGIDFdqvQcaj+zqGvgAewTOmz1nEPZY2X11aNoVa1MESgMg0aXLILLMUVoey8cvGzVcZiYeSXGRY8yQMyDGkz8Hj8TnK8QOg1B1cfbktF0A2DNTgewiJjkQ4I1ycpk3oxFFYxiixFAzD+GBWJ8nTgMScJ6vFLsIet/95pqx7fvgSQg+ZA3SIdzHTtur2ZzGTjoOS9EwG0XIvk5EDQYECJICBQJrVMl
zjJtIRRAdlsmW5Il8YreQDSdvHi+Aw2DA1rw11wTEaMkNoGxK9gTaCAZFqkCYEoqI1nnUiyJ+ZrYYE3aucOPPABjUKg23LqNpx3G+v722LMFU8wR1goO226L7djPhGXjqa83MXrVSJGWMWqRASBId2JEAMJeUZT322q1Emagvj5fVt48lL/ko0CguLts1YFUOti9PAVgamkVBFEYcMg6s7KpHKyfuWw16FZ/kOcwOB4NfomolxgXg9M4RtABJBHl1Dhzk7qA1NteR4x0sf+ASQjX59q/J6+KVtd40P6ezn2Zj9mSO7u3lGLHVISJ21e2s2uhsYYu263gIuRiBRfXxxutMkAfU+W+9ye7uj9jD0n2bqbW1NoGbm4ZduGmjpK40RY0pbWraYmYVLMiUiQMjIFbjiKNuo67jmluNa5u0HUbMpiDaWMKOFkMBGVAULJkKFCWzRpbWShAQi2u7cMUXMxkKYJkxCosyxHJgWBehlDpdjzATmtlMa3JhTDBLFDJKJRMEpMkqoLCkLRnEsS2iZJDMEsyR7EPOdXv4V9/CYj4xttXxQCj52h528mmBZEh5dO2fOBJCHShyy2dwPcAf3MxyQGHcATrcsXmMknVLlzslSCkXRBejHwFmKnPifKVRM7A9L1mMdMoQu1pBf0Z2aA1h40lOZRMsUHsNdiKCHgmAMANdUVEXfF4qmD1rDhXHAP4KA0CQEBXNJQ82mnh6f21JlG3YCJCG59OmjW8VJ1CYtOBQgOieMMW/RNGIvYsUJJ3ccrUwCKS73OiodpMZ+YaFJmcOEEg8OE0OhItEiUw88NInqOvAccADAtlAonhYypm0SnfLY5hmDK41rRUAYr0EPvnM1iIzJ1oW52A7Cllk44Xi9tfNeOCJxS2xBl/GEMsbD6xkSMJgQQokEEZoGwgOQgYICMhx3pAnU56RKhVSpgciAgxSTNXJhC2WGBokVwwhEQb6PH0Tx8AkLcljv731Oe8OcisnAxkBCsXez2ZXeY7r7L7UTDG+fggJAaloFhOhtuv8Z43pFcITGgG1hy6uOpY5JcTkYQDiGuISkoE2pQMg4HCz9g64m4VQuHvY4ZEQleDIUIaGG3AGxMEWqEA03sIjZEcgEAcDLLplZmcIvwuL+NlvIiIlbLIoE/jKrVqqLCp95g+hbBgDrcHXXXnY7zKp3TcJ1XaeZRA6jY0LWIVQrsE0WiQmQ2MGRyxODXAOG6TILQHOed2cCThZvB7gEkInYkzFnoQLkldkBdaH2OmvPOd9tw4kEqSnnNuGUdmcC7bjZXBHwYQ8+tg+jHIKXYAiqTAwY1JAcrXYW2YIzUT10ObdXy6TdvDe0xoLUGyZr3bTlnN0NJEoziSlETiWSVATuZNEk0XJEDs5ju5ESSwdUPdkZjWTEQge2JD5kjS1OQATxMcFvLlXkQdACSEYWBz0AKZ61y6EoUzuERpB8w/mB2SzXPh0IYvjeDtOexDX31OXconKUkVqtdOm2QAdIWgU5x0643D09e6vIMvuLOKMOUyj4MbShVL1q4V06enXgkgEnOeePU8Y6E3ur2umJR4IBJTzpj3k5ZMJrz+UBJCM7LDgKBucCtmTNAbOgD7jrHzcfpk2yuHSxEEw4RJIopjbR9FKohw3Q/eQxZoyjEQWcWlZwmm7hW2XB4V4cl1FyH2OPCcY8u86PO8hynLggnO83QUKKqKcPRh0YLdLwrQRBgjCvS45k6OY10xn4nWbWk2OFJ0TUyho4phhmTCuS4LLGL0yzNUbjUa9KZiTcwgEM5IEnQLEfH6ZXUTb6VQk1o72SatHcmU66+YNuCcD1TVNv+jXiR7D1eVy0dapbtCpnmNzV17tJb0d8BIRtDi0UHuHQ7ZovRVEV5q22qqoSotVBzFcgo1voTcVdVXIqj2c5rbpHpmBaczeGvq8ifhZEPb9J9V9n0ZMjlnTjjjSTokMCzjNhkBhWfN7bMp+1x06E49gEkIPWjw60UMO2yGdP2mR9ICSEQp
I5TBgPjqNDFADNxAIIAcdYlB7GLsAokxQpChVYQgAkRLttL0Fr5ZHIY5gkSLSWgH9RdyRThQkIta79sA==')))
| 290.290323
| 8,086
| 0.927436
| 266
| 8,999
| 31.135338
| 0.913534
| 0.00326
| 0.004588
| 0.006279
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.134349
| 0.022336
| 8,999
| 30
| 8,087
| 299.966667
| 0.807002
| 0.004667
| 0
| 0
| 0
| 0.142857
| 0.967113
| 0.96508
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.571429
| 0
| 0.571429
| 0.142857
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
84106543b212b3205ee4c72dac437a76a2237a86
| 79
|
py
|
Python
|
habu/lib/auth/__init__.py
|
vincentfer/https-github.com-fportantier-habu
|
f9b305b8d1e06eda07ea06c2f5535336fedd7a87
|
[
"BSD-3-Clause"
] | 1
|
2020-08-16T05:49:37.000Z
|
2020-08-16T05:49:37.000Z
|
habu/lib/auth/__init__.py
|
dolboBobo/habu
|
d9f2df6ac66b7527f7a731f943af07726ff7e2e2
|
[
"BSD-3-Clause"
] | null | null | null |
habu/lib/auth/__init__.py
|
dolboBobo/habu
|
d9f2df6ac66b7527f7a731f943af07726ff7e2e2
|
[
"BSD-3-Clause"
] | 1
|
2020-05-14T23:35:11.000Z
|
2020-05-14T23:35:11.000Z
|
from habu.lib.auth.ftp import FTPAuth
from habu.lib.auth.ftps import FTPSAuth
| 19.75
| 39
| 0.810127
| 14
| 79
| 4.571429
| 0.642857
| 0.25
| 0.34375
| 0.46875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.113924
| 79
| 3
| 40
| 26.333333
| 0.914286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
844b876f571bd26ffcf99026017b26bc41e9aae7
| 11,120
|
py
|
Python
|
misc/dataLoader.py
|
roma-ghewari/visDial.pytorch
|
03fe6e679170d54a985b6402f07fea4a5fb4dd73
|
[
"MIT"
] | null | null | null |
misc/dataLoader.py
|
roma-ghewari/visDial.pytorch
|
03fe6e679170d54a985b6402f07fea4a5fb4dd73
|
[
"MIT"
] | null | null | null |
misc/dataLoader.py
|
roma-ghewari/visDial.pytorch
|
03fe6e679170d54a985b6402f07fea4a5fb4dd73
|
[
"MIT"
] | 2
|
2019-11-03T04:06:16.000Z
|
2019-11-05T22:44:23.000Z
|
import torch.utils.data as data
from PIL import Image
import torch
import numpy as np
import h5py
import json
import pdb
import random
from misc.utils import repackage_hidden, clip_gradient, adjust_learning_rate, decode_txt
class train(data.Dataset) : # torch wrapper
def __init__(self, input_img_h5, input_ques_h5, input_json, negative_sample, num_val, data_split) :
print('DataLoader loading: %s' % data_split)
print('Loading image feature from %s' % input_img_h5)
if data_split == 'test' :
split = 'val'
else :
split = 'train' # train and val split both corresponding to 'train'
f = json.load(open(input_json, 'r'))
self.itow = f['itow']
self.img_info = f['img_' + split]
# get the data split.
total_num = len(self.img_info)
if data_split == 'train' :
s = 0
e = total_num - num_val
elif data_split == 'val' :
s = total_num - num_val
e = total_num
else :
s = 0
e = total_num
self.img_info = self.img_info[s :e]
print('%s number of data: %d' % (data_split, e - s))
# load the data.
f = h5py.File(input_img_h5, 'r')
self.imgs = f['images_' + split][s :e]
f.close()
print('Loading txt from %s' % input_ques_h5)
f = h5py.File(input_ques_h5, 'r')
self.ques = f['ques_' + split][s :e]
self.ans = f['ans_' + split][s :e]
self.cap = f['cap_' + split][s :e]
self.ques_len = f['ques_len_' + split][s :e]
self.ans_len = f['ans_len_' + split][s :e]
self.cap_len = f['cap_len_' + split][s :e]
self.ans_ids = f['ans_index_' + split][s :e]
self.opt_ids = f['opt_' + split][s :e]
self.opt_list = f['opt_list_' + split][:]
self.opt_len = f['opt_len_' + split][:]
f.close()
self.ques_length = self.ques.shape[2] # Max word length of a question. Current Value is 16
self.ans_length = self.ans.shape[2] # Max word length of answer. Current value is 8
self.his_length = self.ques_length + self.ans_length # Max word length of question and answer combined. Current value is 16+8 = 24
self.vocab_size = len(self.itow) + 1
print('Vocab Size: %d' % self.vocab_size)
self.split = split
self.total_qa_pairs = 10
self.negative_sample = negative_sample
def __getitem__(self, index) :
# get the image
img = torch.from_numpy(self.imgs[index])
# get the history
#Format of one row of his:
#0 0 .. 0 0 q q q q a a a a
#leading zero - (question words - answer words) OR leading zero - caption words
his = np.zeros((self.total_qa_pairs, self.his_length))
his[0, self.his_length - self.cap_len[index] :] = self.cap[index, :self.cap_len[index]]
ques = np.zeros((self.total_qa_pairs, self.ques_length))
ans_vocab_first = np.zeros((self.total_qa_pairs, self.ans_length + 1))
ans_vocab_last = np.zeros((self.total_qa_pairs, self.ans_length + 1))
ques_trailing_zeros = np.zeros((self.total_qa_pairs, self.ques_length))
opt_ans_vocab_first = np.zeros((self.total_qa_pairs, self.negative_sample, self.ans_length + 1))
ans_len = np.zeros((self.total_qa_pairs))
opt_ans_len = np.zeros((self.total_qa_pairs, self.negative_sample))
ans_idx = np.zeros((self.total_qa_pairs))
opt_ans_idx = np.zeros((self.total_qa_pairs, self.negative_sample))
for i in range(self.total_qa_pairs) :
# get the index
q_len = self.ques_len[index, i]
a_len = self.ans_len[index, i]
qa_len = q_len + a_len
if i + 1 < self.total_qa_pairs :
his[i + 1, self.his_length - qa_len :self.his_length - a_len] = self.ques[index, i, :q_len]
his[i + 1, self.his_length - a_len :] = self.ans[index, i, :a_len]
ques[i, self.ques_length - q_len :] = self.ques[index, i, :q_len]
ques_trailing_zeros[i, :q_len] = self.ques[index, i, :q_len]
ans_vocab_first[i, 1 :a_len + 1] = self.ans[index, i, :a_len]
ans_vocab_first[i, 0] = self.vocab_size
ans_vocab_last[i, :a_len] = self.ans[index, i, :a_len]
ans_vocab_last[i, a_len] = self.vocab_size
ans_len[i] = self.ans_len[index, i]
opt_ids = self.opt_ids[index, i] # since python start from 0
# random select the negative samples.
ans_idx[i] = opt_ids[self.ans_ids[index, i]]
# exclude the gt index.
opt_ids = np.delete(opt_ids, ans_idx[i], 0)
random.shuffle(opt_ids)
for j in range(self.negative_sample) :
ids = opt_ids[j]
opt_ans_idx[i, j] = ids
opt_len = self.opt_len[ids]
opt_ans_len[i, j] = opt_len
opt_ans_vocab_first[i, j, :opt_len] = self.opt_list[ids, :opt_len]
opt_ans_vocab_first[i, j, opt_len] = self.vocab_size
his = torch.from_numpy(his)
ques = torch.from_numpy(ques)
ans_vocab_first = torch.from_numpy(ans_vocab_first)
ans_vocab_last = torch.from_numpy(ans_vocab_last)
ques_trailing_zeros = torch.from_numpy(ques_trailing_zeros)
ans_len = torch.from_numpy(ans_len)
opt_ans_len = torch.from_numpy(opt_ans_len)
opt_ans_vocab_first = torch.from_numpy(opt_ans_vocab_first)
ans_idx = torch.from_numpy(ans_idx)
opt_ans_idx = torch.from_numpy(opt_ans_idx)
return img, his, ques, ans_vocab_first, ans_vocab_last, ans_len, ans_idx, ques_trailing_zeros, \
opt_ans_vocab_first, opt_ans_len, opt_ans_idx
def __len__(self) :
return self.ques.shape[0]
class validate(data.Dataset) : # torch wrapper
def __init__(self, input_img_h5, input_ques_h5, input_json, negative_sample, num_val, data_split) :
print('DataLoader loading: %s' % data_split)
print('Loading image feature from %s' % input_img_h5)
if data_split == 'test' :
split = 'val'
else :
split = 'train' # train and val split both corresponding to 'train'
f = json.load(open(input_json, 'r'))
self.itow = f['itow']
self.img_info = f['img_' + split]
# get the data split.
total_num = len(self.img_info)
if data_split == 'train' :
e = total_num - num_val
elif data_split == 'val' :
s = total_num - num_val
e = total_num
else :
s = 0
e = total_num
self.img_info = self.img_info[s :e]
print('%s number of data: %d' % (data_split, e - s))
# load the data.
f = h5py.File(input_img_h5, 'r')
self.imgs = f['images_' + split][s :e]
f.close()
print('Loading txt from %s' % input_ques_h5)
f = h5py.File(input_ques_h5, 'r')
self.ques = f['ques_' + split][s :e]
self.ans = f['ans_' + split][s :e]
self.cap = f['cap_' + split][s :e]
self.ques_len = f['ques_len_' + split][s :e]
self.ans_len = f['ans_len_' + split][s :e]
self.cap_len = f['cap_len_' + split][s :e]
self.ans_ids = f['ans_index_' + split][s :e]
self.opt_ids = f['opt_' + split][s :e]
self.opt_list = f['opt_list_' + split][:]
self.opt_len = f['opt_len_' + split][:]
f.close()
self.ques_length = self.ques.shape[2]
self.ans_length = self.ans.shape[2]
self.his_length = self.ques_length + self.ans_length
self.vocab_size = len(self.itow) + 1
print('Vocab Size: %d' % self.vocab_size)
self.split = split
self.total_qa_pairs = 10
self.negative_sample = negative_sample
def __getitem__(self, index) :
# get the image
img_id = self.img_info[index]['imgId']
img = torch.from_numpy(self.imgs[index])
# get the history
his = np.zeros((self.total_qa_pairs, self.his_length))
his[0, self.his_length - self.cap_len[index] :] = self.cap[index, :self.cap_len[index]]
ques = np.zeros((self.total_qa_pairs, self.ques_length))
ans_vocab_first = np.zeros((self.total_qa_pairs, self.ans_length + 1))
ans_vocab_last = np.zeros((self.total_qa_pairs, self.ans_length + 1))
ques_trailing_zeros = np.zeros((self.total_qa_pairs, self.ques_length))
opt_ans_vocab_first = np.zeros((self.total_qa_pairs, 100, self.ans_length + 1))
ans_idx = np.zeros(self.total_qa_pairs)
opt_ans_vocab_last = np.zeros((self.total_qa_pairs, 100, self.ans_length + 1))
ans_len = np.zeros((self.total_qa_pairs))
opt_ans_len = np.zeros((self.total_qa_pairs, 100))
for i in range(self.total_qa_pairs) :
# get the index
q_len = self.ques_len[index, i]
a_len = self.ans_len[index, i]
qa_len = q_len + a_len
if i + 1 < self.total_qa_pairs :
ques_ans = np.concatenate([self.ques[index, i, :q_len], self.ans[index, i, :a_len]])
his[i + 1, self.his_length - qa_len :] = ques_ans
ques[i, self.ques_length - q_len :] = self.ques[index, i, :q_len]
ques_trailing_zeros[i, :q_len] = self.ques[index, i, :q_len]
ans_vocab_first[i, 1 :a_len + 1] = self.ans[index, i, :a_len]
ans_vocab_first[i, 0] = self.vocab_size
ans_vocab_last[i, :a_len] = self.ans[index, i, :a_len]
ans_vocab_last[i, a_len] = self.vocab_size
ans_idx[i] = self.ans_ids[index, i] # since python start from 0
opt_ids = self.opt_ids[index, i] # since python start from 0
ans_len[i] = self.ans_len[index, i]
for j, ids in enumerate(opt_ids) :
opt_len = self.opt_len[ids]
opt_ans_vocab_first[i, j, 1 :opt_len + 1] = self.opt_list[ids, :opt_len]
opt_ans_vocab_first[i, j, 0] = self.vocab_size
opt_ans_vocab_last[i, j, :opt_len] = self.opt_list[ids, :opt_len]
opt_ans_vocab_last[i, j, opt_len] = self.vocab_size
opt_ans_len[i, j] = opt_len
opt_ans_vocab_first = torch.from_numpy(opt_ans_vocab_first)
opt_ans_vocab_last = torch.from_numpy(opt_ans_vocab_last)
ans_idx = torch.from_numpy(ans_idx)
his = torch.from_numpy(his)
ques = torch.from_numpy(ques)
ans_vocab_first = torch.from_numpy(ans_vocab_first)
ans_vocab_last = torch.from_numpy(ans_vocab_last)
ques_trailing_zeros = torch.from_numpy(ques_trailing_zeros)
ans_len = torch.from_numpy(ans_len)
opt_ans_len = torch.from_numpy(opt_ans_len)
return img, his, ques, ans_vocab_first, ans_vocab_last, ques_trailing_zeros, opt_ans_vocab_first, \
opt_ans_vocab_last, ans_idx, ans_len, opt_ans_len, img_id
def __len__(self) :
return self.ques.shape[0]
| 39.856631
| 139
| 0.598831
| 1,705
| 11,120
| 3.599413
| 0.080352
| 0.05475
| 0.046603
| 0.067786
| 0.890826
| 0.876487
| 0.859215
| 0.829233
| 0.792244
| 0.739938
| 0
| 0.01004
| 0.283453
| 11,120
| 278
| 140
| 40
| 0.760166
| 0.065018
| 0
| 0.785366
| 0
| 0
| 0.041365
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.029268
| false
| 0
| 0.043902
| 0.009756
| 0.102439
| 0.04878
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
845203c3d29266bc21334edd453839798d1bd931
| 74
|
py
|
Python
|
tests/python/conftest.py
|
AntaresSimulatorTeam/antares-xpansion
|
1aa5775e78d35dd07fc4bd6d0423ce534adf0816
|
[
"Apache-2.0"
] | 5
|
2021-08-25T09:29:14.000Z
|
2022-03-09T08:59:07.000Z
|
tests/python/conftest.py
|
AntaresSimulatorTeam/antares-xpansion
|
1aa5775e78d35dd07fc4bd6d0423ce534adf0816
|
[
"Apache-2.0"
] | 251
|
2020-10-30T09:49:29.000Z
|
2022-03-31T12:28:48.000Z
|
tests/python/conftest.py
|
AntaresSimulatorTeam/antares-xpansion
|
1aa5775e78d35dd07fc4bd6d0423ce534adf0816
|
[
"Apache-2.0"
] | 5
|
2020-12-16T15:11:33.000Z
|
2021-06-18T05:03:59.000Z
|
import sys
sys.path.insert(0, "../../src/python")
sys.path.insert(0, ".")
| 18.5
| 38
| 0.621622
| 12
| 74
| 3.833333
| 0.583333
| 0.304348
| 0.565217
| 0.608696
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.029412
| 0.081081
| 74
| 3
| 39
| 24.666667
| 0.647059
| 0
| 0
| 0
| 0
| 0
| 0.22973
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
fff865695858af8f61a42c97bcea12feed82c4e6
| 33
|
py
|
Python
|
j4j_proxy/__init__.py
|
FZJ-JSC/jupyter-jsc-jupyterhub-collection
|
3fbb83da6e356df57bbdd24269157944f7fcd2a5
|
[
"BSD-3-Clause"
] | null | null | null |
j4j_proxy/__init__.py
|
FZJ-JSC/jupyter-jsc-jupyterhub-collection
|
3fbb83da6e356df57bbdd24269157944f7fcd2a5
|
[
"BSD-3-Clause"
] | null | null | null |
j4j_proxy/__init__.py
|
FZJ-JSC/jupyter-jsc-jupyterhub-collection
|
3fbb83da6e356df57bbdd24269157944f7fcd2a5
|
[
"BSD-3-Clause"
] | null | null | null |
from .j4j_proxy import J4J_Proxy
| 16.5
| 32
| 0.848485
| 6
| 33
| 4.333333
| 0.666667
| 0.615385
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.068966
| 0.121212
| 33
| 1
| 33
| 33
| 0.827586
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
084f5fcc90dca2f8a31a2c90f0cdfd63af6ec187
| 8,768
|
py
|
Python
|
marsyas-vamp/marsyas/src/marsyas_python/osc_tests.py
|
jaouahbi/VampPlugins
|
27c2248d1c717417fe4d448cdfb4cb882a8a336a
|
[
"Apache-2.0"
] | null | null | null |
marsyas-vamp/marsyas/src/marsyas_python/osc_tests.py
|
jaouahbi/VampPlugins
|
27c2248d1c717417fe4d448cdfb4cb882a8a336a
|
[
"Apache-2.0"
] | null | null | null |
marsyas-vamp/marsyas/src/marsyas_python/osc_tests.py
|
jaouahbi/VampPlugins
|
27c2248d1c717417fe4d448cdfb4cb882a8a336a
|
[
"Apache-2.0"
] | null | null | null |
from marsyas import *
from marsyas_util import create
from synth_util import *
def main():
additive()
aliasing()
allpass()
blit()
dpw()
plucked()
waveguid()
def waveguide():
gen = ["Series/fmnet", ["ADSR/pitch","WaveguideOsc/waveguide","ADSR/adsr","Gain/gain","SoundFileSink/dest2"]]
# Create network and intialize parameter mapping
network = create(gen)
network.updControl("ADSR/adsr/mrs_real/aTime", 0.1)
network.updControl("Gain/gain/mrs_real/gain", 0.6)
# These mapping are to make the system work with play melody
network.linkControl("ADSR/adsr/mrs_bool/noteon", "mrs_bool/noteon")
network.linkControl("ADSR/adsr/mrs_bool/noteoff", "mrs_bool/noteoff")
network.linkControl("ADSR/pitch/mrs_bool/noteon", "mrs_bool/noteon")
network.linkControl("ADSR/pitch/mrs_bool/noteoff", "mrs_bool/noteoff")
network.linkControl("WaveguideOsc/waveguide/mrs_real/frequency", "mrs_real/frequency")
# Set the systems sample rate
sample_rate = 44100.0
network.updControl( "mrs_real/israte", sample_rate)
# Set up Audio File
network.updControl("SoundFileSink/dest2/mrs_string/filename", "WaveguideTest.wav")
play_melody(network)
network.updControl("ADSR/pitch/mrs_bool/bypass", MarControlPtr.from_bool(True))
network.updControl("SoundFileSink/dest2/mrs_string/filename", "WaveguideTestPitch.wav")
play_melody(network)
def additive():
gen = ["Series/fmnet", ["AdditiveOsc/additve","ADSR/adsr","Gain/gain","SoundFileSink/dest2"]]
# Create network and intialize parameter mapping
network = create(gen)
network.updControl("ADSR/adsr/mrs_real/aTime", 0.1)
network.updControl("Gain/gain/mrs_real/gain", 0.8)
# These mapping are to make the system work with play melody
network.linkControl("ADSR/adsr/mrs_bool/noteon", "mrs_bool/noteon")
network.linkControl("ADSR/adsr/mrs_bool/noteoff", "mrs_bool/noteoff")
network.linkControl("AdditiveOsc/additve/mrs_real/frequency", "mrs_real/frequency")
# Set the systems sample rate
sample_rate = 44100.0
network.updControl( "mrs_real/israte", sample_rate)
# Set up Audio File
network.updControl("SoundFileSink/dest2/mrs_string/filename", "AdditiveTestSaw.wav")
play_melody(network)
def plucked():
gen = ["Series/fmnet", ["Plucked/plucked","Gain/gain","SoundFileSink/dest2"]]
# Create network and intialize parameter mapping
network = create(gen)
network.updControl("Gain/gain/mrs_real/gain", 0.8)
# These mapping are to make the system work with play melody
network.linkControl("Plucked/plucked/mrs_bool/noteon", "mrs_bool/noteon")
network.linkControl("Plucked/plucked/mrs_real/frequency", "mrs_real/frequency")
# Set the systems sample rate
sample_rate = 44100.0
network.updControl( "mrs_real/israte", sample_rate)
# Set up Audio File
network.updControl("SoundFileSink/dest2/mrs_string/filename", "PluckedTest.wav")
play_melody(network)
def dpw():
    """Render the test melody with a DPW (differentiated parabolic wave) saw.

    Oscillator type 0 selects the sawtooth variant; output goes to DPWSaw.wav.
    """
    spec = ["Series/fmnet",
            ["DPWOsc/dpw", "ADSR/adsr", "Gain/gain", "SoundFileSink/dest2"]]
    # Create network and initialize parameter mapping.
    network = create(spec)
    network.updControl("ADSR/adsr/mrs_real/aTime", 0.1)
    network.updControl("Gain/gain/mrs_real/gain", 0.8)
    # Expose child controls at the top level so play_melody can address them.
    for child, exported in (
            ("ADSR/adsr/mrs_bool/noteon", "mrs_bool/noteon"),
            ("ADSR/adsr/mrs_bool/noteoff", "mrs_bool/noteoff"),
            ("DPWOsc/dpw/mrs_real/frequency", "mrs_real/frequency")):
        network.linkControl(child, exported)
    # System sample rate.
    network.updControl("mrs_real/israte", 44100.0)
    # Oscillator type 0 (saw) and destination audio file.
    network.updControl("DPWOsc/dpw/mrs_natural/type", 0)
    network.updControl("SoundFileSink/dest2/mrs_string/filename", "DPWSaw.wav")
    play_melody(network)
def blit():
    """Render two melodies with the BlitOsc: saw (type 0) then square (type 1).

    The saw pass uses a wide MIDI sweep (notes 10..99) to exercise the
    band-limited impulse train across the frequency range.
    """
    spec = ["Series/fmnet",
            ["BlitOsc/blit", "ADSR/adsr", "Gain/gain", "SoundFileSink/dest2"]]
    # Create network and initialize parameter mapping.
    network = create(spec)
    network.updControl("ADSR/adsr/mrs_real/aTime", 0.1)
    network.updControl("Gain/gain/mrs_real/gain", 0.8)
    # Expose child controls at the top level so play_melody can address them.
    for child, exported in (
            ("ADSR/adsr/mrs_bool/noteon", "mrs_bool/noteon"),
            ("ADSR/adsr/mrs_bool/noteoff", "mrs_bool/noteoff"),
            ("BlitOsc/blit/mrs_real/frequency", "mrs_real/frequency")):
        network.linkControl(child, exported)
    # System sample rate.
    network.updControl("mrs_real/israte", 44100.0)
    # Saw pass over a chromatic sweep of MIDI notes 10..99.
    network.updControl("BlitOsc/blit/mrs_natural/type", 0)
    network.updControl("SoundFileSink/dest2/mrs_string/filename", "BlitTestSaw.wav")
    sweep = [midi2freq(note) for note in range(10, 100)]
    play_melody(network, notes=sweep)
    # Square pass with the default melody.
    network.updControl("BlitOsc/blit/mrs_natural/type", 1)
    network.updControl("SoundFileSink/dest2/mrs_string/filename", "BlitTestSquare.wav")
    play_melody(network)
def allpass():
    """Render the test melody with the allpass-delay oscillator to APTestSaw.wav.

    The oscillator's own noteon flag is latched True once up front; per-note
    gating is then handled by the ADSR envelope via the linked controls.
    """
    spec = ["Series/fmnet",
            ["APDelayOsc/apdelay", "ADSR/adsr", "Gain/gain", "SoundFileSink/dest2"]]
    # Create network and initialize parameter mapping.
    network = create(spec)
    network.updControl("ADSR/adsr/mrs_real/aTime", 0.1)
    network.updControl("APDelayOsc/apdelay/mrs_bool/noteon", MarControlPtr.from_bool(True))
    network.updControl("Gain/gain/mrs_real/gain", 0.8)
    # Expose child controls at the top level so play_melody can address them.
    for child, exported in (
            ("ADSR/adsr/mrs_bool/noteon", "mrs_bool/noteon"),
            ("ADSR/adsr/mrs_bool/noteoff", "mrs_bool/noteoff"),
            ("APDelayOsc/apdelay/mrs_real/frequency", "mrs_real/frequency")):
        network.linkControl(child, exported)
    # System sample rate.
    network.updControl("mrs_real/israte", 44100.0)
    # Oscillator type 0 (saw) and destination audio file.
    network.updControl("APDelayOsc/apdelay/mrs_natural/type", 0)
    network.updControl("SoundFileSink/dest2/mrs_string/filename", "APTestSaw.wav")
    play_melody(network)
def aliasing():
    """Exercise the AliasingOsc in four renders: PWM, saw, pitch-mod, square.

    A Fanout of two ADSR envelopes ("pitch" and "pwm") feeds the oscillator;
    each example toggles their bypass flags and the oscillator type, then
    renders one melody pass to a distinct .wav file.
    """
    mod = ["Fanout/fo", ["ADSR/pitch", "ADSR/pwm"]]
    osc = ["Series/osc", [mod, "AliasingOsc/osc"]]
    gen = ["Series/fmnet", [osc, "ADSR/adsr", "Gain/gain", "SoundFileSink/dest2"]]
    # Create network and initialize parameter mapping.
    network = create(gen)
    # Set the system's sample rate.
    sample_rate = 44100.0
    network.updControl("mrs_real/israte", sample_rate)
    network.updControl("Series/osc/Fanout/fo/ADSR/pwm/mrs_real/aTime", 0.7)
    network.updControl("ADSR/adsr/mrs_real/aTime", 0.1)
    # BUG FIX: the gain was previously set to 0.7 and then immediately
    # overwritten with 0.8, so only 0.8 ever took effect; the dead 0.7
    # update has been removed (effective behavior unchanged).
    network.updControl("Gain/gain/mrs_real/gain", 0.8)
    # These mappings make the system work with play_melody.
    network.linkControl("ADSR/adsr/mrs_bool/noteon", "mrs_bool/noteon")
    network.linkControl("ADSR/adsr/mrs_bool/noteoff", "mrs_bool/noteoff")
    network.linkControl("Series/osc/Fanout/fo/ADSR/pwm/mrs_bool/noteon", "mrs_bool/noteon")
    network.linkControl("Series/osc/Fanout/fo/ADSR/pwm/mrs_bool/noteoff", "mrs_bool/noteoff")
    network.linkControl("Series/osc/Fanout/fo/ADSR/pitch/mrs_bool/noteon", "mrs_bool/noteon")
    network.linkControl("Series/osc/Fanout/fo/ADSR/pitch/mrs_bool/noteoff", "mrs_bool/noteoff")
    network.linkControl("Series/osc/AliasingOsc/osc/mrs_real/frequency", "mrs_real/frequency")
    # PWM example: square wave with the pwm envelope driving the duty cycle.
    network.updControl("Series/osc/AliasingOsc/osc/mrs_natural/type", 1)
    network.updControl("Series/osc/Fanout/fo/ADSR/pwm/mrs_bool/bypass", MarControlPtr.from_bool(True))
    network.updControl("Series/osc/AliasingOsc/osc/mrs_bool/cyclicin", MarControlPtr.from_bool(True))
    network.updControl("SoundFileSink/dest2/mrs_string/filename", "AliasingTestPWM.wav")
    play_melody(network)
    # Saw wave example: modulation disabled.
    network.updControl("Series/osc/AliasingOsc/osc/mrs_natural/type", 0)
    network.updControl("Series/osc/Fanout/fo/ADSR/pwm/mrs_bool/bypass", MarControlPtr.from_bool(False))
    network.updControl("Series/osc/AliasingOsc/osc/mrs_bool/cyclicin", MarControlPtr.from_bool(False))
    network.updControl("SoundFileSink/dest2/mrs_string/filename", "AliasingTestSaw.wav")
    play_melody(network)
    # Pitch modulation example: pitch envelope bypassed on, then restored.
    network.updControl("Series/osc/Fanout/fo/ADSR/pitch/mrs_bool/bypass", MarControlPtr.from_bool(True))
    network.updControl("SoundFileSink/dest2/mrs_string/filename", "AliasingTestPitch.wav")
    play_melody(network)
    network.updControl("Series/osc/Fanout/fo/ADSR/pitch/mrs_bool/bypass", MarControlPtr.from_bool(False))
    # Square wave example.
    network.updControl("Series/osc/AliasingOsc/osc/mrs_natural/type", 1)
    network.updControl("SoundFileSink/dest2/mrs_string/filename", "AliasingTestSquare.wav")
    play_melody(network)
if __name__ == "__main__":
    # Script entry point: run the oscillator rendering tests
    # (main is defined elsewhere in this file).
    main()
| 49.536723
| 113
| 0.733006
| 1,157
| 8,768
| 5.429559
| 0.096802
| 0.132601
| 0.043458
| 0.049666
| 0.864374
| 0.828717
| 0.809615
| 0.793537
| 0.780643
| 0.773002
| 0
| 0.01363
| 0.12979
| 8,768
| 176
| 114
| 49.818182
| 0.809699
| 0.128422
| 0
| 0.442748
| 0
| 0
| 0.458213
| 0.307359
| 0
| 0
| 0
| 0
| 0
| 1
| 0.061069
| false
| 0.053435
| 0.022901
| 0
| 0.083969
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
f24a05cd29b6a9ea1de56d0fea1c2ccfc8ec8ff8
| 14,113
|
py
|
Python
|
lbpketappipig/MC_12_MagUp_lbpketappipig.py
|
Williams224/davinci-scripts
|
730642d2ff13543eca4073a4ce0932631195de56
|
[
"MIT"
] | null | null | null |
lbpketappipig/MC_12_MagUp_lbpketappipig.py
|
Williams224/davinci-scripts
|
730642d2ff13543eca4073a4ce0932631195de56
|
[
"MIT"
] | null | null | null |
lbpketappipig/MC_12_MagUp_lbpketappipig.py
|
Williams224/davinci-scripts
|
730642d2ff13543eca4073a4ce0932631195de56
|
[
"MIT"
] | null | null | null |
#-- GAUDI jobOptions generated on Sun Feb 7 02:07:34 2016
#-- Contains event types :
#-- 15104201 - 151 files - 3002173 events - 906.80 GBytes
#-- Extra information about the data processing phases:
#-- Processing Pass Step-124834
#-- StepId : 124834
#-- StepName : Reco14a for MC
#-- ApplicationName : Brunel
#-- ApplicationVersion : v43r2p7
#-- OptionFiles : $APPCONFIGOPTS/Brunel/DataType-2012.py;$APPCONFIGOPTS/Brunel/MC-WithTruth.py;$APPCONFIGOPTS/Persistency/Compression-ZLIB-1.py
#-- DDDB : fromPreviousStep
#-- CONDDB : fromPreviousStep
#-- ExtraPackages : AppConfig.v3r164
#-- Visible : Y
#-- Processing Pass Step-125836
#-- StepId : 125836
#-- StepName : Stripping20-NoPrescalingFlagged for Sim08 - Implicit merging.
#-- ApplicationName : DaVinci
#-- ApplicationVersion : v32r2p1
#-- OptionFiles : $APPCONFIGOPTS/DaVinci/DV-Stripping20-Stripping-MC-NoPrescaling.py;$APPCONFIGOPTS/DaVinci/DataType-2012.py;$APPCONFIGOPTS/DaVinci/InputType-DST.py;$APPCONFIGOPTS/Persistency/Compression-ZLIB-1.py
#-- DDDB : fromPreviousStep
#-- CONDDB : fromPreviousStep
#-- ExtraPackages : AppConfig.v3r164
#-- Visible : Y
from Gaudi.Configuration import *
from GaudiConf import IOHelper

# Input dataset: LHCb 2012 MC production 00038895 (ALLSTREAMS.DST), 151 files.
# File numbers run from 2 to 163; the numbers below are absent from the
# production, so they are skipped when the LFN list is generated.
_SKIPPED = {6, 38, 44, 54, 57, 70, 86, 89, 104, 140, 157}
_LFN_TEMPLATE = ('LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00038895/0000/'
                 '00038895_%08d_2.AllStreams.dst')

IOHelper('ROOT').inputFiles(
    [_LFN_TEMPLATE % n for n in range(2, 164) if n not in _SKIPPED],
    clear=True)
| 75.069149
| 215
| 0.814852
| 2,096
| 14,113
| 5.342557
| 0.11355
| 0.350598
| 0.121361
| 0.175299
| 0.829612
| 0.829612
| 0.829612
| 0.829612
| 0.829612
| 0.825505
| 0
| 0.368394
| 0.023879
| 14,113
| 187
| 216
| 75.470588
| 0.444469
| 0.078722
| 0
| 0
| 1
| 0.980519
| 0.942822
| 0.942514
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.012987
| 0
| 0.012987
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 15
|
f24c46b91ab7c58133fad860002aa12fe639c9a3
| 1,404
|
py
|
Python
|
Python Files/maze.py
|
kaskrex/pythonexercises
|
2afff8495529f9b1fc1d3f40c03d41227c1f253f
|
[
"MIT"
] | 1
|
2021-08-10T05:40:35.000Z
|
2021-08-10T05:40:35.000Z
|
Python Files/maze.py
|
kaskrex/pythonexercises
|
2afff8495529f9b1fc1d3f40c03d41227c1f253f
|
[
"MIT"
] | null | null | null |
Python Files/maze.py
|
kaskrex/pythonexercises
|
2afff8495529f9b1fc1d3f40c03d41227c1f253f
|
[
"MIT"
] | null | null | null |
# Interactive text maze: the player repeatedly chooses between two doors;
# door 1 leads to a bear encounter (run/yell), door 2 ends the game.
# NOTE(review): indentation appears to have been lost in this copy of the
# file — as written, the bodies of the if/while statements are not indented,
# so this will not run; the original nesting must be restored from context.
a = input("Choose between 2 doors")
if a == str(1):
print("Door #1 choosen")
elif a == str(2):
print("Door #2 choosen")
else:
print("Wrong door! Choose again!")
# r acts as a "retry" flag: 1 means ask again, 0 means a valid choice was made.
#if r = 1, restart counter
r = 1
while r == 1:
a = input("Choose between 2 doors")
if a == str(1):
print("Door #1 choosen")
# Bear encounter: "run" and "yell" are valid answers; anything else retries.
b = input("Saw a bear, run or yell?")
if b == "run":
print("Which path do you take?")
elif b == "yell":
print("Sore throat no voice")
else:
print("Die only, but heaven doesn't want you. Try again!")
r = 1
# Inner retry loop for the bear prompt; r = 0 exits on a valid answer.
while r == 1:
b = input("Saw a bear, run or yell?")
if b == "run":
print("Which path do you take?")
r = 0
elif b == "yell":
print("Sore throat no voice")
r = 0
else:
print("Die only, but heaven doesn't want you. Try again!")
r = 1
elif a == str(2):
# Door 2 is a valid final choice; clear the retry flag to end the game.
print("Door #2 choosen")
r = 0
| 29.25
| 82
| 0.342593
| 147
| 1,404
| 3.272109
| 0.29932
| 0.024948
| 0.049896
| 0.079002
| 0.889813
| 0.856549
| 0.856549
| 0.856549
| 0.619543
| 0.619543
| 0
| 0.030498
| 0.556268
| 1,404
| 48
| 83
| 29.25
| 0.741573
| 0.017806
| 0
| 0.970588
| 0
| 0
| 0.271936
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.323529
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f2b179aa228cb298e8c51dacd9bf810f88c49ab0
| 171
|
py
|
Python
|
train.py
|
chiragjn/deep-char-cnn-lstm
|
d50d85742c1c1396dc199708683cdbf77a58425d
|
[
"MIT"
] | 20
|
2018-02-15T17:55:09.000Z
|
2021-05-08T04:56:09.000Z
|
train.py
|
chiragjn/deep-char-cnn-lstm
|
d50d85742c1c1396dc199708683cdbf77a58425d
|
[
"MIT"
] | null | null | null |
train.py
|
chiragjn/deep-char-cnn-lstm
|
d50d85742c1c1396dc199708683cdbf77a58425d
|
[
"MIT"
] | 6
|
2018-07-20T03:16:18.000Z
|
2019-11-11T17:36:59.000Z
|
# TODO: Add loading utils
# TODO: Add preprocessing and padding utils
# TODO: Add batching utils
# TODO: Add model training code
# TODO: Add model continue-training code
| 24.428571
| 43
| 0.760234
| 25
| 171
| 5.2
| 0.48
| 0.269231
| 0.276923
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.175439
| 171
| 6
| 44
| 28.5
| 0.921986
| 0.929825
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0.166667
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
29a1ac5fc74445cb4baee2f28966650dc3703ba5
| 202
|
py
|
Python
|
vedro_pyppeteer/__init__.py
|
nikitanovosibirsk/vedro-pyppeteer
|
ff869659619410a2aa9e9c2bba677033cc4fce38
|
[
"Apache-2.0"
] | null | null | null |
vedro_pyppeteer/__init__.py
|
nikitanovosibirsk/vedro-pyppeteer
|
ff869659619410a2aa9e9c2bba677033cc4fce38
|
[
"Apache-2.0"
] | null | null | null |
vedro_pyppeteer/__init__.py
|
nikitanovosibirsk/vedro-pyppeteer
|
ff869659619410a2aa9e9c2bba677033cc4fce38
|
[
"Apache-2.0"
] | null | null | null |
# Package facade: re-export the public API of the plugin implementation module.
from ._vedro_pyppeteer import Pyppeteer, PyppeteerPlugin, opened_browser, opened_browser_page

# Package version string.
__version__ = "1.0.0"
# Explicit public API for `from vedro_pyppeteer import *`.
__all__ = ("Pyppeteer", "PyppeteerPlugin", "opened_browser", "opened_browser_page",)
| 40.4
| 93
| 0.79703
| 23
| 202
| 6.304348
| 0.521739
| 0.358621
| 0.413793
| 0.510345
| 0.744828
| 0.744828
| 0.744828
| 0
| 0
| 0
| 0
| 0.016216
| 0.084158
| 202
| 4
| 94
| 50.5
| 0.767568
| 0
| 0
| 0
| 0
| 0
| 0.306931
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
29d03e3a161a02c127ef42a0f8b9aef7c95c094b
| 200
|
py
|
Python
|
fractal_ml/__init__.py
|
EleutherAI/fractal-ml
|
531fe34b165d728be7ac9a7ab3080dfddbb6395d
|
[
"MIT"
] | 2
|
2021-03-01T18:25:16.000Z
|
2021-10-12T06:38:59.000Z
|
fractal_ml/__init__.py
|
StellaAthena/fractal-ml
|
2fbf58631a07df32e24dc4a19453b4e2321b3c19
|
[
"MIT"
] | null | null | null |
fractal_ml/__init__.py
|
StellaAthena/fractal-ml
|
2fbf58631a07df32e24dc4a19453b4e2321b3c19
|
[
"MIT"
] | 3
|
2020-06-03T19:48:33.000Z
|
2021-03-05T21:57:44.000Z
|
# Package facade: re-export the public API of the implementation module.
from .fractal_ml import generate_direction, approx_box_counting, generate_labeled_dust

# BUG FIX: __all__ previously also listed "generate_labeled_direction", which
# is never imported or defined at this module level, so
# `from fractal_ml import *` raised AttributeError. __all__ now matches
# exactly the names bound above.
__all__ = ["generate_direction", "approx_box_counting", "generate_labeled_dust"]
| 33.333333
| 109
| 0.845
| 24
| 200
| 6.333333
| 0.5
| 0.296053
| 0.236842
| 0.342105
| 0.592105
| 0.592105
| 0.592105
| 0
| 0
| 0
| 0
| 0
| 0.07
| 200
| 5
| 110
| 40
| 0.817204
| 0
| 0
| 0
| 1
| 0
| 0.424242
| 0.237374
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
29eaf415ae4bd1696f66200ea4cbaaa33d331c9c
| 26,705
|
py
|
Python
|
L1Trigger/L1CaloTrigger/test/test_Phase1L1TJets_cfg.py
|
thesps/cmssw
|
ad5315934948ce96699b29cc1d5b03a59f99634f
|
[
"Apache-2.0"
] | null | null | null |
L1Trigger/L1CaloTrigger/test/test_Phase1L1TJets_cfg.py
|
thesps/cmssw
|
ad5315934948ce96699b29cc1d5b03a59f99634f
|
[
"Apache-2.0"
] | null | null | null |
L1Trigger/L1CaloTrigger/test/test_Phase1L1TJets_cfg.py
|
thesps/cmssw
|
ad5315934948ce96699b29cc1d5b03a59f99634f
|
[
"Apache-2.0"
] | null | null | null |
import FWCore.ParameterSet.Config as cms
from math import pi
process = cms.Process("TEST")
process.load("FWCore.MessageService.MessageLogger_cfi")
process.MessageLogger.cerr.FwkReport.reportEvery = 1
process.maxEvents = cms.untracked.PSet( input = cms.untracked.int32(-1) )
process.source = process.source = cms.Source("PoolSource",
fileNames = cms.untracked.vstring(
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job1.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job10.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job100.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job101.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job102.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job103.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job104.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job105.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job106.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job107.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job108.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job109.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job11.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job110.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job111.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job112.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job113.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job114.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job115.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job116.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job117.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job118.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job119.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job12.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job120.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job121.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job122.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job123.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job124.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job125.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job126.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job127.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job128.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job129.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job13.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job130.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job131.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job132.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job133.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job134.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job135.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job136.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job137.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job138.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job139.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job14.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job140.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job141.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job142.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job143.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job144.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job145.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job146.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job147.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job148.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job149.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job15.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job150.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job151.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job152.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job153.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job154.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job155.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job156.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job157.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job158.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job159.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job16.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job160.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job161.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job162.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job163.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job164.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job165.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job166.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job167.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job168.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job169.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job17.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job170.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job171.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job172.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job173.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job174.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job175.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job176.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job177.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job178.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job179.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job18.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job180.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job181.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job182.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job183.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job184.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job185.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job186.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job187.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job188.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job189.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job19.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job190.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job191.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job192.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job193.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job194.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job195.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job196.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job197.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job198.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job199.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job2.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job20.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job200.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job201.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job202.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job203.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job204.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job205.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job206.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job207.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job208.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job209.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job21.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job210.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job211.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job212.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job213.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job214.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job215.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job216.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job217.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job218.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job219.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job22.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job220.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job221.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job222.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job223.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job224.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job225.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job226.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job227.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job228.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job229.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job23.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job230.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job231.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job232.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job233.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job234.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job235.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job236.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job237.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job238.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job239.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job24.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job240.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job241.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job242.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job243.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job244.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job245.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job246.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job247.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job248.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job249.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job25.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job250.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job26.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job27.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job28.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job29.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job3.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job30.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job31.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job32.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job33.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job34.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job35.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job36.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job37.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job38.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job39.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job4.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job40.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job41.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job42.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job43.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job44.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job45.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job46.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job47.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job48.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job49.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job5.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job50.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job51.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job52.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job53.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job54.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job55.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job56.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job57.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job58.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job59.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job6.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job60.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job61.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job62.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job63.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job64.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job65.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job66.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job67.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job68.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job69.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job7.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job70.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job71.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job72.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job73.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job74.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job75.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job76.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job77.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job78.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job79.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job8.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job80.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job81.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job82.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job83.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job84.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job85.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job86.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job87.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job88.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job89.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job9.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job90.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job91.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job92.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job93.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job94.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job95.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job96.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job97.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job98.root",
"file:/hdfs/user/sb17498/CMS_Phase_2/jetMETStudies/TTBar_PU200/inputs104X_TTbar_PU200_job99.root",
)
)
process.load('L1Trigger.L1CaloTrigger.Phase1L1TJets_cff')
process.out = cms.OutputModule("PoolOutputModule",
fileName = cms.untracked.string('myOutputFile.root'),
outputCommands = cms.untracked.vstring(
"drop *",
"keep *_Phase1L1TJetProducer_*_*",
"keep *_ak4GenJetsNoNu_*_*",
"keep *_Phase1L1TJetCalibrator_*_*",
),
)
process.p = cms.Path(process.Phase1L1TJetsSequence)
process.e = cms.EndPath(process.out)
| 95.375
| 103
| 0.83329
| 3,837
| 26,705
| 5.405786
| 0.080792
| 0.241057
| 0.144634
| 0.229004
| 0.90377
| 0.90377
| 0.90377
| 0.90377
| 0.90377
| 0.90377
| 0
| 0.17375
| 0.050215
| 26,705
| 280
| 104
| 95.375
| 0.644023
| 0
| 0
| 0
| 0
| 0
| 0.902943
| 0.899648
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.007353
| 0
| 0.007353
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
29ef83cf05dcbbb9fee0cf81db871bb57178cf8a
| 6,086
|
py
|
Python
|
tests/system/presenter/test_get_users.py
|
FinnStutzenstein/openslides-backend
|
fffc152f79d3446591e07a6913d9fdf30b46f577
|
[
"MIT"
] | null | null | null |
tests/system/presenter/test_get_users.py
|
FinnStutzenstein/openslides-backend
|
fffc152f79d3446591e07a6913d9fdf30b46f577
|
[
"MIT"
] | null | null | null |
tests/system/presenter/test_get_users.py
|
FinnStutzenstein/openslides-backend
|
fffc152f79d3446591e07a6913d9fdf30b46f577
|
[
"MIT"
] | null | null | null |
from .base import BasePresenterTestCase
class TestGetUsers(BasePresenterTestCase):
def test_temporary_filter_pagenation(self) -> None:
self.set_models(
{
"meeting/1": {"name": "meeting1"},
"user/2": {
"username": "florian",
"first_name": "Florian",
"last_name": "Freiheit",
},
"user/3": {
"username": "test",
"first_name": "Testy",
"last_name": "Tester",
},
"user/4": {
"username": "john",
"first_name": "John",
"last_name": "Xylon",
"meeting_id": 1,
},
}
)
status_code, data = self.request(
"get_users",
{
"start_index": 1,
"entries": 2,
"sort_criteria": ["username"],
"reverse": False,
"include_temporary": False,
},
)
self.assertEqual(status_code, 200)
self.assertEqual(data, {"users": [2, 3]})
def test_keywords_filter(self) -> None:
self.set_models(
{
"meeting/1": {"name": "meeting1"},
"user/2": {
"username": "florian",
"first_name": "Florian",
"last_name": "Freiheit",
},
"user/3": {
"username": "test",
"first_name": "Testy",
"last_name": "Tester",
},
"user/4": {
"username": "john",
"first_name": "John",
"last_name": "Xylon",
"meeting_id": 1,
},
"user/5": {
"username": "xorr",
"first_name": "John",
"last_name": "Xorr",
},
}
)
status_code, data = self.request(
"get_users",
{
"start_index": 0,
"entries": 100,
"sort_criteria": ["first_name", "username"],
"reverse": True,
"include_temporary": True,
"filter": "John",
},
)
self.assertEqual(status_code, 200)
self.assertEqual(data, {"users": [5, 4]})
def test_keywords_pagenated(self) -> None:
self.set_models(
{
"meeting/1": {"name": "meeting1"},
"user/2": {
"username": "florian",
"first_name": "Florian",
"last_name": "Freiheit",
},
"user/3": {
"username": "test",
"first_name": "Testy",
"last_name": "Tester",
},
"user/4": {
"username": "john",
"first_name": "John",
"last_name": "Xylon",
"meeting_id": 1,
},
"user/5": {
"username": "xorr",
"first_name": "John",
"last_name": "Xorr",
},
}
)
status_code, data = self.request(
"get_users",
{
"start_index": 1,
"entries": 1,
"sort_criteria": ["first_name", "username"],
"reverse": True,
"include_temporary": True,
"filter": "John",
},
)
self.assertEqual(status_code, 200)
self.assertEqual(data, {"users": [4]})
def test_check_defaults(self) -> None:
self.set_models(
{
"meeting/1": {"name": "meeting1"},
"user/2": {
"username": "florian",
"first_name": "Florian",
"last_name": "Freiheit",
},
"user/3": {
"username": "test",
"first_name": "Testy",
"last_name": "Tester",
},
"user/4": {
"username": "john",
"first_name": "John",
"last_name": "Xylon",
"meeting_id": 1,
},
"user/5": {
"username": "xorr",
"first_name": "John",
"last_name": "Xorr",
},
}
)
status_code, data = self.request("get_users", {})
self.assertEqual(status_code, 200)
self.assertEqual(data, {"users": [1, 2, 3, 5]})
def test_check_sort_title(self) -> None:
self.set_models(
{
"meeting/1": {"name": "meeting1"},
"user/2": {
"username": "florian",
"first_name": "Florian",
"last_name": "Freiheit",
"title": "Accc",
},
"user/3": {
"username": "test",
"first_name": "Testy",
"last_name": "Tester",
"title": "Cbbb",
},
"user/4": {
"username": "john",
"first_name": "John",
"last_name": "Xylon",
"meeting_id": 1,
"title": "Edfff",
},
"user/5": {
"username": "xorr",
"first_name": "John",
"last_name": "Xorr",
},
}
)
status_code, data = self.request("get_users", {"sort_criteria": ["title"]})
self.assertEqual(status_code, 200)
self.assertEqual(data, {"users": [1, 5, 2, 3]})
| 32.72043
| 83
| 0.350969
| 428
| 6,086
| 4.787383
| 0.14486
| 0.09224
| 0.057101
| 0.074671
| 0.847243
| 0.847243
| 0.847243
| 0.847243
| 0.847243
| 0.821864
| 0
| 0.023364
| 0.507723
| 6,086
| 185
| 84
| 32.897297
| 0.660547
| 0
| 0
| 0.625698
| 0
| 0
| 0.234144
| 0
| 0
| 0
| 0
| 0
| 0.055866
| 1
| 0.027933
| false
| 0
| 0.005587
| 0
| 0.039106
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4b071f50c8cb8b6dfafba7684cf6e8bfc81f59d3
| 54,160
|
py
|
Python
|
ProjectIoT/Consultas/views.py
|
caiki/iot-proyecto
|
2cf9af70ab95ecdb19afc69c38cd634cd07fa5d8
|
[
"Apache-2.0"
] | null | null | null |
ProjectIoT/Consultas/views.py
|
caiki/iot-proyecto
|
2cf9af70ab95ecdb19afc69c38cd634cd07fa5d8
|
[
"Apache-2.0"
] | null | null | null |
ProjectIoT/Consultas/views.py
|
caiki/iot-proyecto
|
2cf9af70ab95ecdb19afc69c38cd634cd07fa5d8
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
from django.shortcuts import render, render_to_response, RequestContext
from django.contrib.auth.decorators import login_required, permission_required
from django.contrib.auth.models import User, Group
from django.db.models import Sum,Q,Avg,Count
from django.http import HttpResponseRedirect
from django.http import HttpResponse
from django.db import IntegrityError , transaction
import json
import sys
from django.core import serializers
from Procesos.models import *
from Mantenimientos.models import *
from Consultas.models import *
from datetime import date, time, timedelta
import time
import datetime
import calendar
from django.db.models import Q
#from sklearn import tree
#import pandas as pd
#import numpy as np
# Load scikit's random forest classifier library
#from sklearn.ensemble import RandomForestClassifier
@login_required
def listado_Clientes_view(request):
    """AJAX endpoint: JSON list of clients (titulares) with debit, credit and balance.

    Excludes the card processors MASTER CARD / VISANET. Returns 'fail...'
    text on error or for non-AJAX requests.
    """
    if request.is_ajax():  # bug fix: is_ajax is a method; the bare attribute was always truthy
        pag = request.GET.get('pg', '')
        pg = 1
        if pag != '':
            pg = int(pag)
        # NOTE(review): desde/hasta are computed but never applied to the query
        # below -- pagination appears unfinished; confirm before removing.
        desde = (pg - 1) * 20
        hasta = pg * 20
        try:
            consulta = list(TTitular.objects.all().values('pk', 'Nombre', 'DocID', 'Debito', 'Credito').exclude(Q(Nombre="MASTER CARD") | Q(Nombre="VISANET")))
            lista = []
            for obj in consulta:
                dato = {}
                dato['pk'] = obj['pk']
                dato['nombre'] = obj['Nombre']
                dato['doc'] = obj['DocID']
                dato['credito'] = round(float(obj['Credito']), 2)
                dato['debito'] = round(float(obj['Debito']), 2)
                dato['saldo'] = round(float(obj['Debito']) - float(obj['Credito']), 2)
                lista.append(dato)
            data = json.dumps(lista)
        except Exception:  # narrowed from bare except (no longer swallows SystemExit/KeyboardInterrupt)
            data = 'fail' + str(sys.exc_info()[1])
    else:
        data = 'fail'
    mimetype = "application/json"
    return HttpResponse(data, mimetype)
@login_required
def listado_Combustibles_view(request):
    """AJAX endpoint: JSON list of fuel types (pk + name)."""
    if request.is_ajax():  # bug fix: is_ajax is a method; the bare attribute was always truthy
        try:
            consulta = list(TCombustible.objects.all().values('pk', 'NombreCombustible'))
            lista = []
            for obj in consulta:
                dato = {}
                dato['pk'] = obj['pk']
                dato['nombre'] = obj['NombreCombustible']
                lista.append(dato)
            data = json.dumps(lista)
        except Exception:  # narrowed from bare except
            data = 'fail' + str(sys.exc_info()[1])
    else:
        data = 'fail'
    mimetype = "application/json"
    return HttpResponse(data, mimetype)
@login_required
def listado_Tarjetas_view(request):
    """AJAX endpoint: JSON list of the card-processor titulares (MASTER CARD / VISANET)."""
    if request.is_ajax():  # bug fix: is_ajax is a method; the bare attribute was always truthy
        try:
            consulta = list(TTitular.objects.filter(Q(Nombre="MASTER CARD") | Q(Nombre="VISANET")).values('pk', 'Nombre'))
            lista = []
            for obj in consulta:
                dato = {}
                dato['pk'] = obj['pk']
                dato['nombre'] = obj['Nombre']
                lista.append(dato)
            data = json.dumps(lista)
        except Exception:  # narrowed from bare except
            data = 'fail' + str(sys.exc_info()[1])
    else:
        data = 'fail'
    mimetype = "application/json"
    return HttpResponse(data, mimetype)
@login_required
def listado_Estaciones_view(request):
    """AJAX endpoint: JSON list of stations (grifos) with their administrator."""
    if request.is_ajax():  # bug fix: is_ajax is a method; the bare attribute was always truthy
        try:
            consulta = list(Grifo.objects.all().values('pk', 'NombreEstacion', 'Administrador'))
            lista = []
            for obj in consulta:
                dato = {}
                dato['pk'] = obj['pk']
                dato['nombre'] = obj['NombreEstacion']
                dato['admin'] = obj['Administrador']
                lista.append(dato)
            data = json.dumps(lista)
        except Exception:  # narrowed from bare except
            data = 'fail' + str(sys.exc_info()[1])
    else:
        data = 'fail'
    mimetype = "application/json"
    return HttpResponse(data, mimetype)
@login_required
def listado_Surtidores_view(request):
    """AJAX endpoint: JSON list of pumps (surtidores) for the station given in GET['grifo']."""
    pkEstacion = request.GET.get('grifo')
    if request.is_ajax():  # bug fix: is_ajax is a method; the bare attribute was always truthy
        try:
            consulta = list(Surtidor.objects.filter(Grifo=pkEstacion).values('pk', 'NombreSurtidor'))
            lista = []
            for obj in consulta:
                dato = {}
                dato['pk'] = obj['pk']
                dato['nombre'] = obj['NombreSurtidor']
                lista.append(dato)
            data = json.dumps(lista)
        except Exception:  # narrowed from bare except
            data = 'fail' + str(sys.exc_info()[1])
    else:
        data = 'fail'
    mimetype = "application/json"
    return HttpResponse(data, mimetype)
@login_required
def listado_Manhguera_view(request):
    """AJAX endpoint: JSON list of hoses (mangueras) of GET['surtidor'], each with its fuel type."""
    pkSurtidor = request.GET.get('surtidor')
    if request.is_ajax():  # bug fix: is_ajax is a method; the bare attribute was always truthy
        try:
            consulta = list(TManguera.objects.filter(Surtidor=pkSurtidor).values('pk', 'NombreManguera', 'Combustible'))
            lista = []
            for obj in consulta:
                dato = {}
                dato['pk'] = obj['pk']
                dato['nombre'] = obj['NombreManguera']
                comb = TCombustible.objects.get(pk=obj['Combustible'])
                dato['combustible'] = comb.NombreCombustible
                dato['pk_comb'] = obj['Combustible']
                lista.append(dato)
            data = json.dumps(lista)
        except Exception:  # narrowed from bare except
            data = 'fail' + str(sys.exc_info()[1])
    else:
        data = 'fail'
    mimetype = "application/json"
    return HttpResponse(data, mimetype)
@login_required
def listado_Mangueras_tabla_view(request):
    """AJAX endpoint: hoses of GET['surtidor'] with current fuel price and the
    counter (contometro) readings for the daily report GET['pkReporte'].

    'diaI' is the previous report's closing reading; 'diaF'/'nocheF' are this
    report's day/night readings; all default to '' when no reading exists.
    """
    pkSurtidor = request.GET.get('surtidor')
    pkReporte = request.GET.get('pkReporte')
    if request.is_ajax():  # bug fix: is_ajax is a method; the bare attribute was always truthy
        try:
            consulta = list(TManguera.objects.filter(Surtidor=pkSurtidor).values('pk', 'NombreManguera', 'Combustible'))
            lista = []
            for obj in consulta:
                dato = {}
                dato['pk'] = obj['pk']
                dato['nombre'] = obj['NombreManguera']
                # debug prints removed
                precio = PrecioCombustible.objects.get(Combustible=obj['Combustible'], ReporteDiario=pkReporte)
                dato['precio'] = float(precio.PrecioActual)
                dato['diaI'] = ''
                if LecturaContometro.objects.filter(ReporteDiario__lt=pkReporte, Manguera=obj['pk']).exists():
                    Ulectura = LecturaContometro.objects.filter(ReporteDiario__lt=pkReporte, Manguera=obj['pk']).latest("pk")
                    dato['diaI'] = float(Ulectura.ValorContometroFinal2)
                dato['diaF'] = ''
                dato['nocheF'] = ''
                dato['pkLectura'] = ''
                if LecturaContometro.objects.filter(ReporteDiario=pkReporte, Manguera=obj['pk']).exists():
                    lect = LecturaContometro.objects.get(ReporteDiario=pkReporte, Manguera=obj['pk'])
                    dato['diaF'] = float(lect.ValorContometroFinal1)
                    dato['nocheF'] = float(lect.ValorContometroFinal2)
                    dato['pkLectura'] = lect.pk
                lista.append(dato)
            data = json.dumps(lista)
        except Exception:  # narrowed from bare except
            data = 'fail' + str(sys.exc_info()[1])
    else:
        data = 'fail'
    mimetype = "application/json"
    return HttpResponse(data, mimetype)
@login_required
def listado_movimientos_reporte_view(request):
    """AJAX endpoint: tank movements of the daily report GET['pkReporte'].

    COMPRA/DEVOLUCION (and movements without a Motivo) add to the running
    total; any other motive subtracts. Appends {'totalMovimientosreal': total}
    as the last list element.
    """
    pkReporte = request.GET.get('pkReporte')
    if request.is_ajax():  # bug fix: is_ajax is a method; the bare attribute was always truthy
        try:
            consulta = list(TMovimientoVista.objects.filter(ReporteDiario=pkReporte).values('pk', 'Motivo', 'IngresoTransferencia', 'IngresoTransferenciaContable', 'Tanque_id', 'Titular_id', 'Observacion'))
            lista = []
            total = 0.0
            for obj in consulta:
                dato = {}
                tanque = Tanque.objects.get(pk=obj['Tanque_id'])
                dato['nombreTanque'] = tanque.Nombre
                dato['pk'] = obj['pk']
                dato['motivo'] = obj['Motivo']
                dato['ingresocontable'] = round(float(obj['IngresoTransferenciaContable']), 2)
                dato['observacion'] = obj['Observacion']
                if obj['Titular_id'] is not None:
                    titular = TTitular.objects.get(pk=obj['Titular_id'])
                    dato['titular'] = titular.Nombre
                else:
                    dato['titular'] = 'GRIFO'
                dato['ingresoreal'] = round(float(obj['IngresoTransferencia']), 2)
                if obj['Motivo'] == 'COMPRA' or obj['Motivo'] == 'DEVOLUCION' or obj['Motivo'] is None:
                    total = round(total + dato['ingresoreal'], 2)
                else:
                    total = round(total - dato['ingresoreal'], 2)
                lista.append(dato)
            lista.append({'totalMovimientosreal': total})
            data = json.dumps(lista)
        except Exception:  # narrowed from bare except
            data = 'fail' + str(sys.exc_info()[1])
    else:
        data = 'fail'
    mimetype = "application/json"
    return HttpResponse(data, mimetype)
@login_required
def listado_creditos_reporte_view(request):
    """AJAX endpoint: credit vouchers of the daily report GET['pkReporte'],
    excluding card processors (VISANET / MASTER CARD handled by the
    listado_tarjetas_reporte_view counterpart). Appends {'totalCreditos': total}.
    """
    pkReporte = request.GET.get('pkReporte')
    if request.is_ajax():  # bug fix: is_ajax is a method; the bare attribute was always truthy
        try:
            consulta = list(TDetalleCreditos.objects.filter(ReporteDiario=pkReporte).values('pk', 'NroVale', 'ReporteDiario', 'Titular', 'Concepto', 'Monto'))
            lista = []
            total = 0
            for obj in consulta:
                dato = {}
                titu = TTitular.objects.get(pk=obj['Titular'])
                if titu.Nombre != "VISANET" and titu.Nombre != "MASTER CARD":
                    dato['titular'] = titu.Nombre[0:26]  # truncated for table display
                    dato['pk'] = obj['pk']
                    dato['nroVale'] = obj['NroVale']
                    dato['reporte'] = obj['ReporteDiario']
                    dato['pktitular'] = obj['Titular']
                    dato['concepto'] = obj['Concepto']
                    dato['monto'] = round(float(obj['Monto']), 2)
                    total = round(total + dato['monto'], 2)
                    lista.append(dato)
            lista.append({'totalCreditos': total})
            data = json.dumps(lista)
        except Exception:  # narrowed from bare except
            data = 'fail' + str(sys.exc_info()[1])
    else:
        data = 'fail'
    mimetype = "application/json"
    return HttpResponse(data, mimetype)
@login_required
def listado_compras_view(request):
    """AJAX endpoint: fuel purchases with unit price, computed purchase price
    and price including IGV and Percepcion surcharges.

    NOTE(review): GET['pkReporte'] is read but never used -- the query returns
    ALL TCompra rows; confirm whether a ReporteDiario filter is missing.
    """
    pkReporte = request.GET.get('pkReporte')
    if request.is_ajax():  # bug fix: is_ajax is a method; the bare attribute was always truthy
        try:
            consulta = list(TCompra.objects.values('FechaCompra', 'NroDocumento', 'idMovimientoVista', 'TransporteGalon', 'Punitario', 'Igv', 'Percepcion'))
            lista = []
            for obj in consulta:
                dato = {}
                MovimientoVista = TMovimientoVista.objects.get(pk=obj['idMovimientoVista'])
                dato['FechaCompra'] = str(obj['FechaCompra'])
                dato['NroDocumento'] = obj['NroDocumento']
                oTanque = Tanque.objects.get(pk=int(str(MovimientoVista.Tanque)))
                dato['Tanque'] = oTanque.Nombre
                dato['GalonesContable'] = str(round(MovimientoVista.IngresoTransferencia, 2))
                dato['Punitario'] = str(obj['Punitario'])
                PrecioCompra = obj['Punitario'] * MovimientoVista.IngresoTransferencia
                dato['PrecioCompra'] = str(round(PrecioCompra, 2))
                # purchase price with IGV and Percepcion percentages applied
                dato['CompraIgvPercepcion'] = str(round(PrecioCompra * (1 + obj['Igv'] / 100) * (1 + obj['Percepcion'] / 100), 2))
                lista.append(dato)
            data = json.dumps(lista)
        except Exception:  # narrowed from bare except
            data = 'fail' + str(sys.exc_info()[1])
    else:
        data = 'fail'
    mimetype = "application/json"
    return HttpResponse(data, mimetype)
@login_required
def listado_creditos_view(request):
    """AJAX endpoint: credit vouchers of report GET['pkReporte'] for station
    GET['pkGrifo'], excluding the VISANET / MASTER CARD card processors.
    """
    pg = request.GET.get('pg')
    pkGrifo = request.GET.get('pkGrifo')
    pkReporte = request.GET.get('pkReporte')
    if request.is_ajax():  # bug fix: is_ajax is a method; the bare attribute was always truthy
        try:
            consulta = list(TDetalleCreditos.objects.filter(ReporteDiario=pkReporte, ReporteDiario__Grifo=pkGrifo).values('pk', 'NroVale', 'ReporteDiario', 'Titular', 'Concepto', 'Monto'))
            lista = []
            for obj in consulta:
                dato = {}
                titu = TTitular.objects.get(pk=obj['Titular'])
                if titu.Nombre != "VISANET" and titu.Nombre != "MASTER CARD":
                    dato['pk'] = obj['pk']
                    dato['nroVale'] = obj['NroVale']
                    dato['reporte'] = obj['ReporteDiario']
                    dato['pktitular'] = obj['Titular']
                    dato['titular'] = titu.Nombre
                    dato['concepto'] = obj['Concepto']
                    dato['monto'] = round(float(obj['Monto']), 2)
                    lista.append(dato)
            data = json.dumps(lista)
        except Exception:  # narrowed from bare except
            data = 'fail' + str(sys.exc_info()[1])
    else:
        data = 'fail'
    mimetype = "application/json"
    return HttpResponse(data, mimetype)
@login_required
def listado_descuentos_view(request):
    """AJAX endpoint: discounts of report GET['pkReporte'] for station
    GET['pkGrifo']. Appends {'totalDescuentos': total}.
    """
    pg = request.GET.get('pg')
    pkGrifo = request.GET.get('pkGrifo')
    pkReporte = request.GET.get('pkReporte')
    if request.is_ajax():  # bug fix: is_ajax is a method; the bare attribute was always truthy
        try:
            consulta = list(TDetalleDescuento.objects.filter(ReporteDiario=pkReporte, ReporteDiario__Grifo=pkGrifo).values('pk', 'NroVale', 'ReporteDiario', 'Titular', 'Concepto', 'Monto'))
            lista = []
            total = 0
            for obj in consulta:
                dato = {}
                dato['pk'] = obj['pk']
                dato['nroVale'] = obj['NroVale']
                dato['reporte'] = obj['ReporteDiario']
                dato['pktitular'] = obj['Titular']
                titu = TTitular.objects.get(pk=obj['Titular'])
                dato['titular'] = titu.Nombre
                dato['concepto'] = obj['Concepto']
                dato['monto'] = round(float(obj['Monto']), 2)
                total = round(total + dato['monto'], 2)
                lista.append(dato)
            lista.append({'totalDescuentos': total})
            data = json.dumps(lista)
        except Exception:  # narrowed from bare except
            data = 'fail' + str(sys.exc_info()[1])
    else:
        data = 'fail'
    mimetype = "application/json"
    return HttpResponse(data, mimetype)
@login_required
def listado_gastos_view(request):
    """AJAX endpoint: expenses of report GET['pkReporte'] for station
    GET['pkGrifo']. Appends {'totalGastos': total}.
    """
    pg = request.GET.get('pg')
    pkGrifo = request.GET.get('pkGrifo')
    pkReporte = request.GET.get('pkReporte')
    if request.is_ajax():  # bug fix: is_ajax is a method; the bare attribute was always truthy
        try:
            consulta = list(TDetalleGastos.objects.filter(ReporteDiario=pkReporte, ReporteDiario__Grifo=pkGrifo).values('pk', 'NroVale', 'ReporteDiario', 'Concepto', 'Monto'))
            lista = []
            total = 0
            for obj in consulta:
                dato = {}
                dato['pk'] = obj['pk']
                dato['nroVale'] = obj['NroVale']
                dato['reporte'] = obj['ReporteDiario']
                dato['concepto'] = obj['Concepto']
                dato['monto'] = round(float(obj['Monto']), 2)
                total = round(total + dato['monto'], 2)
                lista.append(dato)
            lista.append({'totalGastos': total})
            data = json.dumps(lista)
        except Exception:  # narrowed from bare except
            data = 'fail' + str(sys.exc_info()[1])
    else:
        data = 'fail'
    mimetype = "application/json"
    return HttpResponse(data, mimetype)
@login_required
def listado_Serafin_view(request):
    """AJAX endpoint: serafin (calibration test) entries of report
    GET['pkReporte'] for station GET['pkGrifo']. Appends {'totalSerafin': total}.
    """
    pg = request.GET.get('pg')
    pkGrifo = request.GET.get('pkGrifo')
    pkReporte = request.GET.get('pkReporte')
    if request.is_ajax():  # bug fix: is_ajax is a method; the bare attribute was always truthy
        try:
            consulta = list(TDetalleSerafin.objects.filter(ReporteDiario=pkReporte, ReporteDiario__Grifo=pkGrifo).values('pk', 'NroVale', 'ReporteDiario', 'Cantidad', 'Combustible', 'Observacion', 'Monto'))
            lista = []
            total = 0
            for obj in consulta:
                dato = {}
                dato['pk'] = obj['pk']
                dato['nroVale'] = obj['NroVale']
                dato['reporte'] = obj['ReporteDiario']
                dato['observacion'] = obj['Observacion']
                dato['pkcombustible'] = obj['Combustible']
                ncombustible = TCombustible.objects.get(pk=obj['Combustible'])
                dato['combustible'] = ncombustible.NombreCombustible
                dato['cantidad'] = round(float(obj['Cantidad']), 2)
                dato['monto'] = round(float(obj['Monto']), 2)
                total = round(total + dato['monto'], 2)
                lista.append(dato)
            lista.append({'totalSerafin': total})
            data = json.dumps(lista)
        except Exception:  # narrowed from bare except
            data = 'fail' + str(sys.exc_info()[1])
    else:
        data = 'fail'
    mimetype = "application/json"
    return HttpResponse(data, mimetype)
@login_required
def listado_depositos_view(request):
    """AJAX endpoint: bank deposits of report GET['pkReporte'] for station GET['pkGrifo']."""
    pg = request.GET.get('pg')
    pkGrifo = request.GET.get('pkGrifo')
    pkReporte = request.GET.get('pkReporte')
    if request.is_ajax():  # bug fix: is_ajax is a method; the bare attribute was always truthy
        try:
            consulta = list(Tdeposito.objects.filter(ReporteDiario=pkReporte, ReporteDiario__Grifo=pkGrifo).values('pk', 'NroBoucher', 'ReporteDiario', 'Concepto', 'Monto'))
            lista = []
            for obj in consulta:
                dato = {}
                dato['pk'] = obj['pk']
                dato['nroBoucher'] = obj['NroBoucher']
                dato['reporte'] = obj['ReporteDiario']
                dato['concepto'] = obj['Concepto']
                dato['monto'] = round(float(obj['Monto']), 2)
                lista.append(dato)
            data = json.dumps(lista)
        except Exception:  # narrowed from bare except
            data = 'fail' + str(sys.exc_info()[1])
    else:
        data = 'fail'
    mimetype = "application/json"
    return HttpResponse(data, mimetype)
@login_required
def listado_pagos_view(request):
    """AJAX endpoint: credit amortization payments of report GET['pkReporte']
    for station GET['pkGrifo'].
    """
    pg = request.GET.get('pg')
    pkGrifo = request.GET.get('pkGrifo')
    pkReporte = request.GET.get('pkReporte')
    if request.is_ajax():  # bug fix: is_ajax is a method; the bare attribute was always truthy
        try:
            consulta = list(TPagos.objects.filter(ReporteDiario=pkReporte, ReporteDiario__Grifo=pkGrifo).values('pk', 'ReporteDiario', 'Titular', 'Concepto', 'MontoAmortizado'))
            lista = []
            for obj in consulta:
                dato = {}
                dato['pk'] = obj['pk']
                dato['reporte'] = obj['ReporteDiario']
                titu = TTitular.objects.get(pk=obj['Titular'])
                dato['titular'] = titu.Nombre
                dato['pktitular'] = obj['Titular']
                dato['concepto'] = obj['Concepto']
                dato['monto'] = round(float(obj['MontoAmortizado']), 2)
                lista.append(dato)
            data = json.dumps(lista)
        except Exception:  # narrowed from bare except
            data = 'fail' + str(sys.exc_info()[1])
    else:
        data = 'fail'
    mimetype = "application/json"
    return HttpResponse(data, mimetype)
@login_required
def listado_PrecioCombustible_view(request):
    """AJAX endpoint: previous/current fuel prices of report GET['pkReporte']."""
    pkReport = request.GET.get('pkReporte')
    if request.is_ajax():  # bug fix: is_ajax is a method; the bare attribute was always truthy
        try:
            consulta = list(PrecioCombustible.objects.filter(ReporteDiario=pkReport).values('pk', 'Combustible', 'PrecioAnterior', 'PrecioActual'))
            lista = []
            for obj in consulta:
                dato = {}
                dato['pk'] = obj['pk']
                comb = TCombustible.objects.get(pk=obj['Combustible'])
                dato['pkcombustible'] = obj['Combustible']
                dato['combustible'] = comb.NombreCombustible
                dato['precioActual'] = float(obj['PrecioActual'])
                dato['precioAnterior'] = float(obj['PrecioAnterior'])
                lista.append(dato)
            data = json.dumps(lista)
        except Exception:  # narrowed from bare except
            data = 'fail' + str(sys.exc_info()[1])
    else:
        data = 'fail'
    mimetype = "application/json"
    return HttpResponse(data, mimetype)
@login_required
def listado_Tanques_ajax_view(request):
    """AJAX endpoint: tanks of station GET['grifo'], each with its fuel type."""
    pkEstacion = request.GET.get('grifo')
    if request.is_ajax():  # bug fix: is_ajax is a method; the bare attribute was always truthy
        try:
            consulta = list(Tanque.objects.filter(Grifo=pkEstacion).values('pk', 'Nombre', 'Combustible'))
            lista = []
            for obj in consulta:
                dato = {}
                dato['pk'] = obj['pk']
                dato['nombre'] = obj['Nombre']
                dato['pkCombustible'] = obj['Combustible']
                combu = TCombustible.objects.get(pk=obj['Combustible'])
                dato['combustible'] = combu.NombreCombustible
                lista.append(dato)
            data = json.dumps(lista)
        except Exception:  # narrowed from bare except
            data = 'fail' + str(sys.exc_info()[1])
    else:
        data = 'fail'
    mimetype = "application/json"
    return HttpResponse(data, mimetype)
@login_required
def listado_Tanques_tabla_view(request):
    """AJAX endpoint: tanks of station GET['grifo'] with price, previous balance,
    current balance and net transfer income for report GET['pkReporte'].

    Movement sign convention: COMPRA / DEVOLUCION / missing Motivo add;
    everything else subtracts.
    """
    pkGrifo = request.GET.get('grifo')
    pkReporte = request.GET.get('pkReporte')
    if request.is_ajax():  # bug fix: is_ajax is a method; the bare attribute was always truthy
        try:
            consulta = list(Tanque.objects.filter(Grifo=pkGrifo).values('pk', 'Nombre', 'Combustible'))
            lista = []
            for obj in consulta:
                dato = {}
                dato['pk'] = obj['pk']
                dato['nombre'] = obj['Nombre']
                precio = PrecioCombustible.objects.get(Combustible=obj['Combustible'], ReporteDiario=pkReporte)
                dato['precio'] = float(precio.PrecioActual)
                dato['saldoAnt'] = ''
                if TSaldoCombustible.objects.filter(ReporteDiario__lt=pkReporte, Tanque=obj['pk']).exists():
                    UMovi = TSaldoCombustible.objects.filter(ReporteDiario__lt=pkReporte, Tanque=obj['pk']).latest("pk")
                    dato['saldoAnt'] = float(UMovi.SaldoActual)
                dato['saldoAct'] = ''
                dato['ingresoTra'] = ''
                dato['pkMovi'] = ''
                if TSaldoCombustible.objects.filter(ReporteDiario=pkReporte, Tanque=obj['pk']).exists():
                    lect = TSaldoCombustible.objects.get(ReporteDiario=pkReporte, Tanque=obj['pk'])
                    dato['saldoAct'] = float(lect.SaldoActual)
                    dato['pkMovi'] = lect.pk
                query = list(TMovimientoVista.objects.filter(ReporteDiario=pkReporte, Tanque=obj['pk']).values('pk', 'IngresoTransferencia', 'Motivo'))
                if query:
                    tot = 0
                    for objmov in query:
                        if objmov['Motivo'] == 'COMPRA' or objmov['Motivo'] == 'DEVOLUCION' or objmov['Motivo'] is None:
                            tot = tot + float(objmov['IngresoTransferencia'])
                        else:
                            tot = tot - float(objmov['IngresoTransferencia'])
                    dato['ingresoTra'] = tot
                lista.append(dato)
            data = json.dumps(lista)
        except Exception:  # narrowed from bare except
            data = 'fail' + str(sys.exc_info()[1])
    else:
        data = 'fail'
    mimetype = "application/json"
    return HttpResponse(data, mimetype)
@login_required
def reporte_diario_ajax_view(request):
    """AJAX endpoint: header data of daily report GET['pkReporte'] as a JSON
    object, including the previous report's gasto/credito balances and the
    station's pump list.
    """
    pkReporte = request.GET.get('pkReporte')
    if request.is_ajax():  # bug fix: is_ajax is a method; the bare attribute was always truthy
        try:
            reporte = TReporteDiario.objects.get(pk=pkReporte)
            dato = {}
            dato['pk'] = reporte.pk
            dato['fechaIni'] = str(reporte.FechaInicial.strftime("%Y-%m-%d"))
            dato['fechaFin'] = str(reporte.FechaFinal.strftime("%Y-%m-%d"))
            dato['griferos'] = reporte.Griferos
            dato['estacion'] = reporte.Grifo.NombreEstacion
            dato['admin'] = reporte.Grifo.Administrador
            dato['obs'] = reporte.Observacion
            dato['monto_griferos'] = float(reporte.DepositoBancos)
            dato['saldoCredito'] = float(reporte.SaldoCredito)
            dato['saldoGasto'] = float(reporte.SaldoGastos)
            dato['saldoGastoAnt'] = 0
            dato['saldoCreditoAnt'] = 0
            # carry balances over from the most recent earlier report of the same station
            if TReporteDiario.objects.filter(pk__lt=pkReporte, Grifo=reporte.Grifo.pk).exists():
                UReporte = TReporteDiario.objects.filter(pk__lt=pkReporte, Grifo=reporte.Grifo.pk).latest("pk")
                dato['saldoGastoAnt'] = float(UReporte.SaldoGastos)
                dato['saldoCreditoAnt'] = float(UReporte.SaldoCredito)
            lista_surtidor = []
            surtidores = list(Surtidor.objects.filter(Grifo=reporte.Grifo.pk).values('pk', 'NombreSurtidor'))
            for obj in surtidores:
                aux = {}
                aux['pk'] = obj['pk']
                aux['surtifor'] = obj['NombreSurtidor']  # key name 'surtifor' kept as-is: the frontend expects it
                lista_surtidor.append(aux)
            dato['surtidores'] = lista_surtidor
            data = json.dumps(dato)
        except Exception:  # narrowed from bare except
            data = 'fail' + str(sys.exc_info()[1])
    else:
        data = 'fail'
    mimetype = "application/json"
    return HttpResponse(data, mimetype)
def esReal(t):
    """Return True if *t* can be converted to float, else False.

    Used to distinguish real counter readings from the '' sentinel.
    """
    try:
        float(t)
        return True
    except (TypeError, ValueError):  # narrowed from bare except
        return False
@login_required
def lectura_Xreporte_ajax_view(request):
    """AJAX endpoint: counter readings per hose of GET['surtidor'] for report
    GET['pkReporte'], with computed day/night gallons and sale amounts.

    Appends {'totalVenta': totalV} as the last list element. Amounts are only
    computed when all three readings (previous close, day, night) exist.
    """
    pkReporte = request.GET.get('pkReporte')
    pkSurtidor = request.GET.get('surtidor')
    if request.is_ajax():  # bug fix: is_ajax is a method; the bare attribute was always truthy
        try:
            consulta = list(TManguera.objects.filter(Surtidor=pkSurtidor).values('pk', 'NombreManguera', 'Combustible'))
            lista = []
            totalV = 0
            for obj in consulta:
                dato = {}
                dato['pk'] = obj['pk']
                dato['nombre'] = obj['NombreManguera']
                precio = PrecioCombustible.objects.get(Combustible=obj['Combustible'], ReporteDiario=pkReporte)
                dato['precio'] = float(precio.PrecioActual)
                dato['diaI'] = ''
                if LecturaContometro.objects.filter(ReporteDiario__lt=pkReporte, Manguera=obj['pk']).exists():
                    Ulectura = LecturaContometro.objects.filter(ReporteDiario__lt=pkReporte, Manguera=obj['pk']).latest("pk")
                    dato['diaI'] = float(Ulectura.ValorContometroFinal2)
                dato['diaF'] = ''
                dato['nocheF'] = ''
                dato['pkLectura'] = ''
                if LecturaContometro.objects.filter(ReporteDiario=pkReporte, Manguera=obj['pk']).exists():
                    lect = LecturaContometro.objects.get(ReporteDiario=pkReporte, Manguera=obj['pk'])
                    dato['diaF'] = float(lect.ValorContometroFinal1)
                    dato['nocheF'] = float(lect.ValorContometroFinal2)
                    dato['pkLectura'] = lect.pk
                dato['cantGa1'] = ''
                dato['cantGa2'] = ''
                dato['totalGa'] = ''
                dato['venta'] = ''
                if esReal(dato['diaI']) and esReal(dato['diaF']) and esReal(dato['nocheF']):
                    aux1 = dato['diaF'] - dato['diaI']   # day-shift gallons
                    aux2 = dato['nocheF'] - dato['diaF']  # night-shift gallons
                    dato['cantGa1'] = round(aux1, 2)
                    dato['cantGa2'] = round(aux2, 2)
                    dato['totalGa'] = round(aux2 + aux1, 2)
                    dato['venta'] = round((aux2 + aux1) * dato['precio'], 2)
                    totalV = totalV + dato['venta']
                    totalV = round(totalV, 2)
                lista.append(dato)
            lista.append({'totalVenta': totalV})
            data = json.dumps(lista)
        except Exception:  # narrowed from bare except
            data = 'fail' + str(sys.exc_info()[1])
    else:
        data = 'fail'
    mimetype = "application/json"
    return HttpResponse(data, mimetype)
@login_required
def listado_tarjetas_reporte_view(request):
    """AJAX endpoint: card-processor credit entries (VISANET / MASTER CARD only)
    of report GET['pkReporte']. Appends {'totalCreditos': total}.
    """
    pkReporte = request.GET.get('pkReporte')
    if request.is_ajax():  # bug fix: is_ajax is a method; the bare attribute was always truthy
        try:
            consulta = list(TDetalleCreditos.objects.filter(ReporteDiario=pkReporte).values('pk', 'NroVale', 'ReporteDiario', 'Titular', 'Concepto', 'Monto'))
            lista = []
            total = 0
            for obj in consulta:
                dato = {}
                titu = TTitular.objects.get(pk=obj['Titular'])
                if titu.Nombre == "VISANET" or titu.Nombre == "MASTER CARD":
                    dato['titular'] = titu.Nombre[0:26]
                    dato['pk'] = obj['pk']
                    dato['nroVale'] = obj['NroVale']
                    dato['reporte'] = obj['ReporteDiario']
                    dato['pktitular'] = obj['Titular']
                    dato['concepto'] = obj['Concepto']
                    dato['monto'] = round(float(obj['Monto']), 2)
                    total = round(total + dato['monto'], 2)
                    lista.append(dato)
            lista.append({'totalCreditos': total})
            data = json.dumps(lista)
            # debug print removed
        except Exception:  # narrowed from bare except
            data = 'fail' + str(sys.exc_info()[1])
    else:
        data = 'fail'
    mimetype = "application/json"
    return HttpResponse(data, mimetype)
@login_required
def listado_descuentos_reporte_view(request):
    """AJAX endpoint: discount amounts of report GET['pkReporte'].
    Appends {'totalDescuentos': total}.
    """
    pkReporte = request.GET.get('pkReporte')
    if request.is_ajax():  # bug fix: is_ajax is a method; the bare attribute was always truthy
        try:
            consulta = list(TDetalleDescuento.objects.filter(ReporteDiario=pkReporte).values('Monto'))
            lista = []
            total = 0
            for obj in consulta:
                dato = {}
                dato['monto'] = round(float(obj['Monto']), 2)
                total = round(total + dato['monto'], 2)
                lista.append(dato)
            lista.append({'totalDescuentos': total})
            data = json.dumps(lista)
            # debug print removed
        except Exception:  # narrowed from bare except
            data = 'fail' + str(sys.exc_info()[1])
    else:
        data = 'fail'
    mimetype = "application/json"
    return HttpResponse(data, mimetype)
@login_required
def listado_serafin_reporte_view(request):
    """AJAX endpoint: serafin amounts of report GET['pkReporte'].

    NOTE(review): the summary key is 'totalDescuentos' (looks copy-pasted from
    the descuentos view); kept byte-identical because the frontend may depend
    on it -- confirm before renaming to 'totalSerafin'.
    """
    pkReporte = request.GET.get('pkReporte')
    if request.is_ajax():  # bug fix: is_ajax is a method; the bare attribute was always truthy
        try:
            consulta = list(TDetalleSerafin.objects.filter(ReporteDiario=pkReporte).values('Monto'))
            lista = []
            total = 0
            for obj in consulta:
                dato = {}
                dato['monto'] = round(float(obj['Monto']), 2)
                total = round(total + dato['monto'], 2)
                lista.append(dato)
            lista.append({'totalDescuentos': total})
            data = json.dumps(lista)
            # debug print removed
        except Exception:  # narrowed from bare except
            data = 'fail' + str(sys.exc_info()[1])
    else:
        data = 'fail'
    mimetype = "application/json"
    return HttpResponse(data, mimetype)
@login_required
def listado_gastos_reporte_view(request):
    """AJAX endpoint: expenses of report GET['pkReporte'] (concept truncated to
    31 chars for display). Appends {'totalGastos': total}.
    """
    pkReporte = request.GET.get('pkReporte')
    if request.is_ajax():  # bug fix: is_ajax is a method; the bare attribute was always truthy
        try:
            consulta = list(TDetalleGastos.objects.filter(ReporteDiario=pkReporte).values('pk', 'NroVale', 'ReporteDiario', 'Concepto', 'Monto'))
            lista = []
            total = 0
            for obj in consulta:
                dato = {}
                dato['pk'] = obj['pk']
                dato['nroVale'] = obj['NroVale']
                dato['reporte'] = obj['ReporteDiario']
                dato['concepto'] = obj['Concepto'][0:31]
                dato['monto'] = round(float(obj['Monto']), 2)
                total = round(total + dato['monto'], 2)
                lista.append(dato)
            lista.append({'totalGastos': total})
            data = json.dumps(lista)
        except Exception:  # narrowed from bare except
            data = 'fail' + str(sys.exc_info()[1])
    else:
        data = 'fail'
    mimetype = "application/json"
    return HttpResponse(data, mimetype)
@login_required
def listado_depositos_reporte_view(request):
    """AJAX endpoint: deposits of report GET['pkReporte'] (concept truncated to
    31 chars). Appends {'totalDepositos': total}.
    """
    pkReporte = request.GET.get('pkReporte')
    if request.is_ajax():  # bug fix: is_ajax is a method; the bare attribute was always truthy
        try:
            consulta = list(Tdeposito.objects.filter(ReporteDiario=pkReporte).values('pk', 'NroBoucher', 'ReporteDiario', 'Concepto', 'Monto'))
            lista = []
            total = 0
            for obj in consulta:
                dato = {}
                dato['pk'] = obj['pk']
                dato['nroBoucher'] = obj['NroBoucher']
                dato['reporte'] = obj['ReporteDiario']
                dato['concepto'] = obj['Concepto'][0:31]
                dato['monto'] = round(float(obj['Monto']), 2)
                total = round(total + dato['monto'], 2)
                lista.append(dato)
            lista.append({'totalDepositos': total})
            data = json.dumps(lista)
        except Exception:  # narrowed from bare except
            data = 'fail' + str(sys.exc_info()[1])
    else:
        data = 'fail'
    mimetype = "application/json"
    return HttpResponse(data, mimetype)
def calcular_monto_contometro(pkCombustible,pkReporte,pkGrifo,pkTanque):
    """Sum counter-based sale amounts for one fuel/tank of a station.

    For every hose of station *pkGrifo* dispensing *pkCombustible* from tank
    *pkTanque*, multiplies the counter deltas by the report's current fuel
    price and accumulates:

      total       -- (night close - previous close) * price, whole day
      total_dia   -- (day close   - previous close) * price, day shift
      total_noche -- (night close - day close)      * price, night shift

    Returns the tuple (total, total_dia, total_noche). Hoses missing either
    the previous report's reading or the current report's reading are skipped
    (the '' sentinel fails the esReal() check below).
    Raises PrecioCombustible.DoesNotExist if the report has no price row for
    the fuel.
    """
    total=0
    total_dia=0
    total_noche=0
    precio=0
    p= PrecioCombustible.objects.get(Combustible=pkCombustible,ReporteDiario=pkReporte)
    precio= float(p.PrecioActual)
    consulta=list(TManguera.objects.filter(Surtidor__Grifo=pkGrifo,Combustible=pkCombustible,Tanque=pkTanque).values('pk','NombreManguera','Combustible'))
    for obj in consulta:
        # '' marks "no reading"; esReal() below treats it as not-a-number
        diai=''
        diaf=''
        nochef=''
        # closing (night) reading of the most recent earlier report = this report's opening
        if LecturaContometro.objects.filter(ReporteDiario__lt=pkReporte,Manguera=obj['pk']).exists():
            Ulectura= LecturaContometro.objects.filter(ReporteDiario__lt=pkReporte,Manguera=obj['pk']).latest("pk")
            diai=float(Ulectura.ValorContometroFinal2)
        # this report's day-shift (Final1) and night-shift (Final2) readings
        if LecturaContometro.objects.filter(ReporteDiario=pkReporte,Manguera=obj['pk']).exists():
            lect= LecturaContometro.objects.get(ReporteDiario=pkReporte,Manguera=obj['pk'])
            diaf=float(lect.ValorContometroFinal1)
            nochef=float(lect.ValorContometroFinal2)
        # only accumulate when all three readings are present
        if esReal(diai) and esReal(diaf) and esReal(nochef):
            auxto= (nochef - diai)*precio
            auxdia= (diaf - diai)*precio
            auxnoche= (nochef - diaf)*precio
            total=total+auxto
            total_dia=total_dia+auxdia
            total_noche=total_noche+auxnoche
    return total,total_dia,total_noche
@login_required
def Movimiento_vista_reporte_view(request):
    """AJAX endpoint: per-tank reconciliation of report GET['pkReporte'] for
    station GET['grifo'].

    For each tank it reports the physical balances (saldoAnt/saldoAct from
    TSaldoCombustible), the net transfer income from movements, the sales
    implied by the balance delta, and -- prefixed 'mc_' -- the same figures
    derived from the hose counters via calcular_monto_contometro(), plus the
    difference between the two. A totals dict is appended as the last element.
    """
    pkGrifo = request.GET.get('grifo')
    pkReporte = request.GET.get('pkReporte')
    if request.is_ajax():  # bug fix: is_ajax is a method; the bare attribute was always truthy
        try:
            consulta = list(Tanque.objects.filter(Grifo=pkGrifo).values('pk', 'Nombre', 'Combustible'))
            lista = []
            TsaldoAnt = 0
            TingresoTra = 0
            TsaldoAct = 0
            TventaGln = 0
            Ttotal = 0
            # price and ingresoTra are shared with the counter-based ("mc_") figures
            Tmc_saldoAnt = 0
            Tmc_saldoAct = 0
            Tmc_ventaGln = 0
            Tmc_total = 0
            Tmc_total_dia = 0
            Tmc_total_noche = 0
            Tdif = 0
            TsaldoAcu = 0
            for obj in consulta:
                dato = {}
                dato['pk'] = obj['pk']
                dato['nombre'] = obj['Nombre']
                precio = PrecioCombustible.objects.get(Combustible=obj['Combustible'], ReporteDiario=pkReporte)
                dato['precio'] = float(precio.PrecioActual)
                dato['saldoAnt'] = ''
                dato['mc_saldoAnt'] = ''
                if TSaldoCombustible.objects.filter(ReporteDiario__lt=pkReporte, Tanque=obj['pk']).exists():
                    UMovi = TSaldoCombustible.objects.filter(ReporteDiario__lt=pkReporte, Tanque=obj['pk']).latest("pk")
                    dato['saldoAnt'] = float(UMovi.SaldoActual)
                    dato['mc_saldoAnt'] = float(UMovi.SaldoActualMovContable)
                    TsaldoAnt = round(TsaldoAnt + dato['saldoAnt'], 2)
                    Tmc_saldoAnt = round(Tmc_saldoAnt + dato['mc_saldoAnt'], 2)
                dato['saldoAct'] = ''
                dato['ingresoTra'] = ''
                dato['pkMovi'] = ''
                query = list(TMovimientoVista.objects.filter(ReporteDiario=pkReporte, Tanque=obj['pk']).values('pk', 'IngresoTransferencia', 'Motivo'))
                if query:
                    tot = 0
                    for objmov in query:
                        if objmov['Motivo'] == 'COMPRA' or objmov['Motivo'] == 'DEVOLUCION' or objmov['Motivo'] is None:
                            tot = tot + float(objmov['IngresoTransferencia'])
                        else:
                            tot = tot - float(objmov['IngresoTransferencia'])
                    dato['ingresoTra'] = tot
                    TingresoTra = round(TingresoTra + dato['ingresoTra'], 2)
                if TSaldoCombustible.objects.filter(ReporteDiario=pkReporte, Tanque=obj['pk']).exists():
                    lect = TSaldoCombustible.objects.get(ReporteDiario=pkReporte, Tanque=obj['pk'])
                    dato['saldoAct'] = float(lect.SaldoActual)
                    dato['pkMovi'] = lect.pk
                    TsaldoAct = round(TsaldoAct + dato['saldoAct'], 2)
                dato['ventaGln'] = ''
                dato['total'] = ''
                dato['mc_total'] = ''
                dato['mc_ventaGln'] = ''
                dato['mc_saldoAct'] = ''
                dato['mc_dif'] = ''
                dato['saldoAcu'] = ''
                if esReal(dato['saldoAnt']) and esReal(dato['saldoAct']) and esReal(dato['ingresoTra']):
                    # gallons sold = previous balance + transfers - current balance
                    aux = dato['saldoAnt'] + dato['ingresoTra'] - dato['saldoAct']
                    dato['ventaGln'] = round(aux, 2)
                    dato['total'] = round(aux * dato['precio'], 2)
                    aux1, aux2, aux3 = calcular_monto_contometro(obj['Combustible'], pkReporte, pkGrifo, obj['pk'])
                    dato['mc_total'] = round(aux1, 2)
                    dato['mc_ventaGln'] = round(dato['mc_total'] / dato['precio'], 2)
                    dato['mc_saldoAct'] = round(dato['mc_saldoAnt'] + dato['ingresoTra'] - dato['mc_ventaGln'], 2)
                    dato['mc_dif'] = round(dato['mc_ventaGln'] - dato['ventaGln'], 2)
                    dato['saldoAcu'] = round(dato['saldoAct'] - dato['mc_saldoAct'], 2)
                    TventaGln = round(TventaGln + aux, 2)
                    Tmc_ventaGln = round(Tmc_ventaGln + dato['mc_ventaGln'], 2)
                    Tmc_saldoAct = round(Tmc_saldoAct + dato['mc_saldoAct'], 2)
                    Tmc_total = round(Tmc_total + dato['mc_total'], 2)
                    Tmc_total_dia = round(Tmc_total_dia + aux2, 2)
                    Tmc_total_noche = round(Tmc_total_noche + aux3, 2)
                    Ttotal = round(Ttotal + dato['total'], 2)
                    Tdif = round(Tdif + dato['mc_dif'], 2)
                    TsaldoAcu = round(TsaldoAcu + dato['saldoAcu'], 2)
                lista.append(dato)
            lista.append({'TsaldoAnt': TsaldoAnt, 'TsaldoAct': TsaldoAct, 'TingresoTra': TingresoTra, 'TventaGln': TventaGln, 'Ttotal': Ttotal, 'Tmc_total': Tmc_total, 'Tmc_saldoAnt': Tmc_saldoAnt, 'Tmc_ventaGln': Tmc_ventaGln, 'Tmc_saldoAct': Tmc_saldoAct, 'Tdif': Tdif, 'TsaldoAcu': TsaldoAcu, 'Tmc_total_dia': Tmc_total_dia, 'Tmc_total_noche': Tmc_total_noche})
            data = json.dumps(lista)
        except Exception:  # narrowed from bare except
            data = 'fail' + str(sys.exc_info()[1])
    else:
        data = 'fail'
    mimetype = "application/json"
    return HttpResponse(data, mimetype)
@login_required
def listado_pagos_reporte_view(request):
    """AJAX endpoint: payments of report GET['pkReporte'] with titular name
    truncated to 18 chars. Appends {'totalPagos': total}.
    """
    pkReporte = request.GET.get('pkReporte')
    if request.is_ajax():  # bug fix: is_ajax is a method; the bare attribute was always truthy
        try:
            consulta = list(TPagos.objects.filter(ReporteDiario=pkReporte).values('pk', 'Concepto', 'MontoAmortizado', 'Titular'))
            lista = []
            total = 0
            for obj in consulta:
                dato = {}
                dato['pk'] = obj['pk']
                dato['concepto'] = obj['Concepto']
                titu = TTitular.objects.get(pk=obj['Titular'])
                dato['titular'] = titu.Nombre[0:18] + "..."
                dato['monto'] = round(float(obj['MontoAmortizado']), 2)
                total = total + dato['monto']
                lista.append(dato)
            lista.append({'totalPagos': total})
            data = json.dumps(lista)
        except Exception:  # narrowed from bare except
            data = 'fail' + str(sys.exc_info()[1])
    else:
        data = 'fail'
    mimetype = "application/json"
    return HttpResponse(data, mimetype)
@login_required
def listado_reportes_view(request):
    """Render the daily-reports listing page."""
    # render() is the supported equivalent of the deprecated
    # render_to_response(..., context_instance=RequestContext(request)).
    return render(request, 'ListadoReportes.html')
@login_required
def listado_reportes_ajax_view(request):
    """AJAX endpoint: daily reports of station GET['pkGrifo'], newest first."""
    pkGrifo = request.GET.get('pkGrifo')
    if request.is_ajax():  # bug fix: is_ajax is a method; the bare attribute was always truthy
        try:
            consulta = list(TReporteDiario.objects.filter(Grifo=pkGrifo).order_by('-pk').values('pk', 'FechaInicial', 'FechaFinal', 'Griferos', 'Grifo', 'DepositoBancos', 'SaldoCredito', 'SaldoGastos', 'Observacion'))
            lista = []
            for obj in consulta:
                dato = {}
                dato['pk'] = obj['pk']
                dato['desde'] = str(obj['FechaInicial'].strftime("%d-%m-%Y"))
                dato['hasta'] = str(obj['FechaFinal'].strftime("%d-%m-%Y"))
                dato['griferos'] = obj['Griferos']
                g = Grifo.objects.get(pk=obj['Grifo'])
                dato['Grifo'] = g.NombreEstacion
                dato['montoGri'] = round(float(obj['DepositoBancos']), 2)
                dato['saldoGa'] = round(float(obj['SaldoGastos']), 2)
                dato['saldoCre'] = round(float(obj['SaldoCredito']), 2)
                dato['obs'] = obj['Observacion']
                lista.append(dato)
            data = json.dumps(lista)
        except Exception:  # narrowed from bare except
            data = 'fail' + str(sys.exc_info()[1])
    else:
        data = 'fail'
    mimetype = "application/json"
    return HttpResponse(data, mimetype)
#reportes mensuales
@login_required
def reporte_sa_compra_mes_ajax_view(request):
    """AJAX endpoint: month-end sales estimate per fuel from the last counter
    readings of the month GET['mes'] (format 'YYYY-MM').

    NOTE(review): GET['pkEstacion'] is read but never applied to the report
    query -- confirm whether a Grifo filter is missing.
    """
    pkEstacion = request.GET.get('pkEstacion')
    mes = request.GET.get('mes')
    if request.is_ajax():  # bug fix: is_ajax is a method; the bare attribute was always truthy
        try:
            y, m = mes.split('-')[0], mes.split('-')[1]
            reporte = TReporteDiario.objects.filter(FechaFinal__year=y, FechaFinal__month=m).latest('FechaFinal')
            pre_combustibles = list(PrecioCombustible.objects.filter(ReporteDiario=reporte.pk).values('Combustible', 'PrecioActual'))
            lista = []
            total = 0
            for obj in pre_combustibles:
                comb = TCombustible.objects.get(pk=obj['Combustible'])
                dato = {}
                dato['prod'] = comb.NombreCombustible
                lec = LecturaContometro.objects.filter(ReporteDiario=reporte.pk, Manguera__Combustible=obj['Combustible']).aggregate(galones=Sum('ValorContometroFinal2'))
                dato['galones_sa'] = 0
                if lec['galones'] is not None:
                    dato['galones_sa'] = float(lec['galones'])
                dato['ppv'] = float(obj['PrecioActual'])
                dato['subto_sa'] = round(float(dato['galones_sa'] * dato['ppv']), 2)
                total = total + dato['subto_sa']
                lista.append(dato)
            lista.append({'total_sa': total})
            data = json.dumps(lista)
        except Exception:  # narrowed from bare except
            data = 'fail' + str(sys.exc_info()[1])
    else:
        data = 'fail'
    mimetype = "application/json"
    return HttpResponse(data, mimetype)
@login_required
def reporte_creditos_mes_ajax_view(request):
    """AJAX endpoint: outstanding credit per titular for the month GET['mes']
    ('YYYY-MM') at station GET['pkEstacion']: summed credits minus summed
    amortization payments. Appends {'totalCredito': round(total, 2)}.
    """
    pkEstacion = request.GET.get('pkEstacion')
    mes = request.GET.get('mes')
    if request.is_ajax():  # bug fix: is_ajax is a method; the bare attribute was always truthy
        try:
            y, m = mes.split('-')[0], mes.split('-')[1]
            # bug fix: exclusion previously used the misspelling 'MARTER CARD',
            # so MASTER CARD rows were never excluded
            creditos = list(TDetalleCreditos.objects.exclude(Titular__Nombre='VISANET').exclude(Titular__Nombre='MASTER CARD').filter(ReporteDiario__Grifo=pkEstacion, ReporteDiario__FechaFinal__year=y, ReporteDiario__FechaFinal__month=m).values('Titular').annotate(Monto=Sum('Monto')).order_by('-Monto'))
            lista = []
            total = 0
            for obj in creditos:
                # NOTE(review): payments are not filtered by Grifo -- confirm intent
                pago = TPagos.objects.filter(ReporteDiario__FechaFinal__year=y, ReporteDiario__FechaFinal__month=m, Titular=obj['Titular']).aggregate(Monto=Sum('MontoAmortizado'))
                titular = TTitular.objects.get(pk=obj['Titular'])
                dato = {}
                dato['nombre'] = titular.Nombre[:30]
                m_pago = 0
                if pago['Monto'] is not None:
                    m_pago = float(pago['Monto'])
                dato['credito'] = float(obj['Monto']) - m_pago
                total = total + dato['credito']
                lista.append(dato)
            lista.append({'totalCredito': round(total, 2)})
            data = json.dumps(lista)
        except Exception:  # narrowed from bare except
            data = 'fail' + str(sys.exc_info()[1])
    else:
        data = 'fail'
    mimetype = "application/json"
    return HttpResponse(data, mimetype)
@login_required
def listado_gastos_mes_view(request):
    """AJAX endpoint: expenses of station GET['pkGrifo'] for the month of
    GET['F1'] ('YYYY-MM...'). Appends {'totalGastos': total}.

    NOTE(review): GET['filtro'] and GET['F2'] are read but unused.
    """
    pkGrifo = request.GET.get('pkGrifo')
    filtro = request.GET.get('filtro')
    f1 = request.GET.get('F1')
    m = f1.split('-')[1]
    y = f1.split('-')[0]
    f2 = request.GET.get('F2')
    if request.is_ajax():  # bug fix: is_ajax is a method; the bare attribute was always truthy
        try:
            consulta = list(TDetalleGastos.objects.filter(ReporteDiario__Grifo=pkGrifo, ReporteDiario__FechaFinal__month=m, ReporteDiario__FechaFinal__year=y).values('pk', 'NroVale', 'ReporteDiario', 'Concepto', 'Monto'))
            lista = []
            total = 0
            for obj in consulta:
                dato = {}
                dato['pk'] = obj['pk']
                dato['nroVale'] = obj['NroVale']
                dato['reporte'] = obj['ReporteDiario']
                dato['concepto'] = obj['Concepto']
                dato['monto'] = round(float(obj['Monto']), 2)
                total = round(total + dato['monto'], 2)
                lista.append(dato)
            lista.append({'totalGastos': total})
            data = json.dumps(lista)
        except Exception:  # narrowed from bare except
            data = 'fail' + str(sys.exc_info()[1])
    else:
        data = 'fail'
    mimetype = "application/json"
    return HttpResponse(data, mimetype)
@login_required
def listado_gastos_reporte_mes_view(request):
    """Render the monthly expenses report page."""
    ctx = RequestContext(request)
    return render_to_response('reporteGastos.html', context_instance=ctx)
@login_required
def listado_creditos_reporte_mes_view(request):
    """Render the monthly credits report page."""
    ctx = RequestContext(request)
    return render_to_response('reporteCreditos.html', context_instance=ctx)
@login_required
def listado_credito_mes_view(request):
    """Return, as JSON, the credit vouchers (TDetalleCreditos) of a station
    (``pkGrifo``) for the month of ``F1``, excluding card titulars
    (VISANET / 'MARTER CARD'), plus a trailing {'totalCreditos': <sum>}
    entry; on error the body is 'fail' + message.
    """
    pkGrifo = request.GET.get('pkGrifo')
    filtro = request.GET.get('filtro')  # unused, kept for parity
    f1 = request.GET.get('F1')
    m = f1.split('-')[1]
    y = f1.split('-')[0]
    f2 = request.GET.get('F2')  # unused, kept for parity
    # NOTE(review): request.is_ajax is a method object, always truthy here.
    if request.is_ajax:
        try:
            consulta = list(
                TDetalleCreditos.objects
                .filter(ReporteDiario__Grifo=pkGrifo,
                        ReporteDiario__FechaFinal__month=m,
                        ReporteDiario__FechaFinal__year=y)
                .exclude(Titular__Nombre='VISANET')
                .exclude(Titular__Nombre='MARTER CARD')
                .values('pk', 'NroVale', 'Titular', 'ReporteDiario',
                        'Concepto', 'Monto'))
            lista = []
            total = 0
            for obj in consulta:
                titular = TTitular.objects.get(pk=obj['Titular'])
                dato = {}
                dato['pk'] = obj['pk']
                dato['nroVale'] = obj['NroVale']
                dato['reporte'] = obj['ReporteDiario']
                dato['titular'] = titular.Nombre
                dato['concepto'] = obj['Concepto']
                dato['monto'] = round(float(obj['Monto']), 2)
                total = total + dato['monto']
                lista.append(dato)
            lista.append({'totalCreditos': round(total, 2)})
            data = json.dumps(lista)
        except Exception:  # was a bare except; keep the best-effort reply
            data = 'fail' + str(sys.exc_info()[1])
    else:
        data = 'fail'
    mimetype = "application/json"
    return HttpResponse(data, mimetype)
@login_required
def reporte_creditos_mes_cliente_ajax_view(request):
    """Return, as JSON, the gross credit per holder (no payment offset) for a
    station (``pkEstacion``) in the month ``mes`` ('YYYY-MM'), excluding card
    titulars.  Body: list of {'nombre', 'credito'} plus a trailing
    {'totalCredito': <sum>}; 'fail' + message on error.
    """
    pkEstacion = request.GET.get('pkEstacion')
    mes = request.GET.get('mes')
    # NOTE(review): request.is_ajax is a method object, always truthy here.
    if request.is_ajax:
        try:
            m = mes.split('-')[1]
            y = mes.split('-')[0]
            creditos = list(
                TDetalleCreditos.objects
                .exclude(Titular__Nombre='VISANET')
                .exclude(Titular__Nombre='MARTER CARD')
                .filter(ReporteDiario__Grifo=pkEstacion,
                        ReporteDiario__FechaFinal__year=y,
                        ReporteDiario__FechaFinal__month=m)
                .values('Titular')
                .annotate(Monto=Sum('Monto'))
                .order_by('-Monto'))
            lista = []
            total = 0
            print(len(creditos))  # debug leftover, kept for parity
            for obj in creditos:
                titular = TTitular.objects.get(pk=obj['Titular'])
                dato = {}
                dato['nombre'] = titular.Nombre[:30]
                # Unlike reporte_creditos_mes_ajax_view, amortised payments
                # are intentionally NOT subtracted here.
                dato['credito'] = float(obj['Monto'])
                total = total + dato['credito']
                lista.append(dato)
            lista.append({'totalCredito': round(total, 2)})
            data = json.dumps(lista)
        except Exception:  # was a bare except; keep the best-effort reply
            data = 'fail' + str(sys.exc_info()[1])
    else:
        data = 'fail'
    mimetype = "application/json"
    return HttpResponse(data, mimetype)
#tarjetas
@login_required
def listado_Tarjeta_mes_view(request):
    """Return, as JSON, the card vouchers (VISANET / 'MARTER CARD' titulars
    only) of a station (``pkGrifo``) for the month of ``F1``, plus a trailing
    {'totalTarjetas': <sum>} entry; on error the body is 'fail' + message.
    """
    pkGrifo = request.GET.get('pkGrifo')
    filtro = request.GET.get('filtro')  # unused, kept for parity
    f1 = request.GET.get('F1')
    m = f1.split('-')[1]
    y = f1.split('-')[0]
    f2 = request.GET.get('F2')  # unused, kept for parity
    # NOTE(review): request.is_ajax is a method object, always truthy here.
    if request.is_ajax:
        try:
            consulta = list(TDetalleCreditos.objects.filter(
                ReporteDiario__Grifo=pkGrifo,
                ReporteDiario__FechaFinal__month=m,
                ReporteDiario__FechaFinal__year=y,
                Titular__Nombre__in=['VISANET', 'MARTER CARD'],
            ).values('pk', 'NroVale', 'Titular', 'ReporteDiario',
                     'Concepto', 'Monto'))
            lista = []
            total = 0
            for obj in consulta:
                titular = TTitular.objects.get(pk=obj['Titular'])
                dato = {}
                dato['pk'] = obj['pk']
                dato['nroVale'] = obj['NroVale']
                dato['reporte'] = obj['ReporteDiario']
                dato['tarjeta'] = titular.Nombre
                dato['concepto'] = obj['Concepto']
                dato['monto'] = round(float(obj['Monto']), 2)
                total = total + dato['monto']
                lista.append(dato)
            lista.append({'totalTarjetas': round(total, 2)})
            data = json.dumps(lista)
        except Exception:  # was a bare except; keep the best-effort reply
            data = 'fail' + str(sys.exc_info()[1])
    else:
        data = 'fail'
    mimetype = "application/json"
    return HttpResponse(data, mimetype)
@login_required
def reporte_tarjeta_mes_tipo_ajax_view(request):
    """Return, as JSON, the card totals per card type (VISANET /
    'MARTER CARD') for a station (``pkEstacion``) in month ``mes``
    ('YYYY-MM').  Body: list of {'nombre', 'credito'} plus a trailing
    {'totalTarjeta': <sum>}; 'fail' + message on error.
    """
    pkEstacion = request.GET.get('pkEstacion')
    mes = request.GET.get('mes')
    # NOTE(review): request.is_ajax is a method object, always truthy here.
    if request.is_ajax:
        try:
            m = mes.split('-')[1]
            y = mes.split('-')[0]
            creditos = list(
                TDetalleCreditos.objects
                .filter(ReporteDiario__Grifo=pkEstacion,
                        ReporteDiario__FechaFinal__year=y,
                        ReporteDiario__FechaFinal__month=m,
                        Titular__Nombre__in=['VISANET', 'MARTER CARD'])
                .values('Titular')
                .annotate(Monto=Sum('Monto'))
                .order_by('-Monto'))
            lista = []
            total = 0
            print(len(creditos))  # debug leftover, kept for parity
            for obj in creditos:
                titular = TTitular.objects.get(pk=obj['Titular'])
                dato = {}
                dato['nombre'] = titular.Nombre[:30]
                dato['credito'] = float(obj['Monto'])
                total = total + dato['credito']
                lista.append(dato)
            lista.append({'totalTarjeta': round(total, 2)})
            data = json.dumps(lista)
        except Exception:  # was a bare except; keep the best-effort reply
            data = 'fail' + str(sys.exc_info()[1])
    else:
        data = 'fail'
    mimetype = "application/json"
    return HttpResponse(data, mimetype)
@login_required
def listado_tarjeta_reporte_mes_view(request):
    """Render the monthly cards report page."""
    ctx = RequestContext(request)
    return render_to_response('reporteTarjetas.html', context_instance=ctx)
@login_required
def listado_descuentos_mes_view(request):
    """Return, as JSON, the discount vouchers (TDetalleDescuento) of a
    station (``pkGrifo``) for the month of ``F1``, plus a trailing
    {'totalDescuentos': <sum>} entry; on error the body is 'fail' + message.
    """
    pkGrifo = request.GET.get('pkGrifo')
    filtro = request.GET.get('filtro')  # unused, kept for parity
    f1 = request.GET.get('F1')
    m = f1.split('-')[1]
    y = f1.split('-')[0]
    f2 = request.GET.get('F2')  # unused, kept for parity
    # NOTE(review): request.is_ajax is a method object, always truthy here.
    if request.is_ajax:
        try:
            consulta = list(TDetalleDescuento.objects.filter(
                ReporteDiario__Grifo=pkGrifo,
                ReporteDiario__FechaFinal__month=m,
                ReporteDiario__FechaFinal__year=y,
            ).values('pk', 'NroVale', 'Titular', 'ReporteDiario',
                     'Concepto', 'Monto'))
            lista = []
            total = 0
            for obj in consulta:
                titular = TTitular.objects.get(pk=obj['Titular'])
                dato = {}
                dato['pk'] = obj['pk']
                dato['nroVale'] = obj['NroVale']
                dato['reporte'] = obj['ReporteDiario']
                dato['titular'] = titular.Nombre
                dato['concepto'] = obj['Concepto']
                dato['monto'] = round(float(obj['Monto']), 2)
                total = total + dato['monto']
                lista.append(dato)
            lista.append({'totalDescuentos': round(total, 2)})
            data = json.dumps(lista)
        except Exception:  # was a bare except; keep the best-effort reply
            data = 'fail' + str(sys.exc_info()[1])
    else:
        data = 'fail'
    mimetype = "application/json"
    return HttpResponse(data, mimetype)
@login_required
def reporte_descuentos_mes_cliente_ajax_view(request):
    """Return, as JSON, the total discount per holder for a station
    (``pkEstacion``) in month ``mes`` ('YYYY-MM').  Body: list of
    {'nombre', 'descuento'} plus a trailing {'totalDescuento': <sum>};
    'fail' + message on error.
    """
    pkEstacion = request.GET.get('pkEstacion')
    mes = request.GET.get('mes')
    # NOTE(review): request.is_ajax is a method object, always truthy here.
    if request.is_ajax:
        try:
            m = mes.split('-')[1]
            y = mes.split('-')[0]
            descuentos = list(
                TDetalleDescuento.objects
                .filter(ReporteDiario__Grifo=pkEstacion,
                        ReporteDiario__FechaFinal__year=y,
                        ReporteDiario__FechaFinal__month=m)
                .values('Titular')
                .annotate(Monto=Sum('Monto'))
                .order_by('-Monto'))
            lista = []
            total = 0
            for obj in descuentos:
                titular = TTitular.objects.get(pk=obj['Titular'])
                dato = {}
                dato['nombre'] = titular.Nombre[:30]
                dato['descuento'] = float(obj['Monto'])
                total = total + dato['descuento']
                lista.append(dato)
            lista.append({'totalDescuento': round(total, 2)})
            data = json.dumps(lista)
        except Exception:  # was a bare except; keep the best-effort reply
            data = 'fail' + str(sys.exc_info()[1])
    else:
        data = 'fail'
    mimetype = "application/json"
    return HttpResponse(data, mimetype)
@login_required
def listado_Descuento_reporte_mes_view(request):
    """Render the monthly discounts report page."""
    ctx = RequestContext(request)
    return render_to_response('reporteDescuentos.html', context_instance=ctx)
#**************************** PAGOS ******************************
@login_required
def listado_pagos_mes_view(request):
    """Return, as JSON, the amortisation payments (TPagos) of a station
    (``pkGrifo``) for the month of ``F1``, plus a trailing
    {'totalPagos': <sum>} entry; on error the body is 'fail' + message.
    """
    pkGrifo = request.GET.get('pkGrifo')
    filtro = request.GET.get('filtro')  # unused, kept for parity
    f1 = request.GET.get('F1')
    m = f1.split('-')[1]
    y = f1.split('-')[0]
    f2 = request.GET.get('F2')  # unused, kept for parity
    # NOTE(review): request.is_ajax is a method object, always truthy here.
    if request.is_ajax:
        try:
            consulta = list(TPagos.objects.filter(
                ReporteDiario__Grifo=pkGrifo,
                ReporteDiario__FechaFinal__month=m,
                ReporteDiario__FechaFinal__year=y,
            ).values('pk', 'Titular', 'ReporteDiario', 'Concepto',
                     'MontoAmortizado'))
            lista = []
            total = 0
            for obj in consulta:
                titular = TTitular.objects.get(pk=obj['Titular'])
                dato = {}
                dato['pk'] = obj['pk']
                dato['reporte'] = obj['ReporteDiario']
                dato['titular'] = titular.Nombre
                dato['concepto'] = obj['Concepto']
                dato['monto'] = round(float(obj['MontoAmortizado']), 2)
                total = total + dato['monto']
                lista.append(dato)
            lista.append({'totalPagos': round(total, 2)})
            data = json.dumps(lista)
        except Exception:  # was a bare except; keep the best-effort reply
            data = 'fail' + str(sys.exc_info()[1])
    else:
        data = 'fail'
    mimetype = "application/json"
    return HttpResponse(data, mimetype)
@login_required
def reporte_Pagos_mes_cliente_ajax_view(request):
    """Return, as JSON, the total amortised payment per holder for a station
    (``pkEstacion``) in month ``mes`` ('YYYY-MM').  Body: list of
    {'nombre', 'pago'} plus a trailing {'totalPago': <sum>};
    'fail' + message on error.
    """
    pkEstacion = request.GET.get('pkEstacion')
    mes = request.GET.get('mes')
    # NOTE(review): request.is_ajax is a method object, always truthy here.
    if request.is_ajax:
        try:
            m = mes.split('-')[1]
            y = mes.split('-')[0]
            pagos = list(
                TPagos.objects
                .filter(ReporteDiario__Grifo=pkEstacion,
                        ReporteDiario__FechaFinal__year=y,
                        ReporteDiario__FechaFinal__month=m)
                .values('Titular')
                .annotate(Monto=Sum('MontoAmortizado'))
                .order_by('-Monto'))
            lista = []
            total = 0
            for obj in pagos:
                titular = TTitular.objects.get(pk=obj['Titular'])
                dato = {}
                dato['nombre'] = titular.Nombre[:30]
                dato['pago'] = float(obj['Monto'])
                total = total + dato['pago']
                lista.append(dato)
            lista.append({'totalPago': round(total, 2)})
            data = json.dumps(lista)
        except Exception:  # was a bare except; keep the best-effort reply
            data = 'fail' + str(sys.exc_info()[1])
    else:
        data = 'fail'
    mimetype = "application/json"
    return HttpResponse(data, mimetype)
@login_required
def listado_Pago_reporte_mes_view(request):
    """Render the monthly payments report page."""
    ctx = RequestContext(request)
    return render_to_response('reportePagos.html', context_instance=ctx)
#-------------------------------------Nuevo-------------------------------------------
def myconverter(o):
    """json.dumps ``default`` hook: stringify datetimes, None otherwise."""
    return str(o) if isinstance(o, datetime.datetime) else None
@login_required
def listado_heart_view(request):
    """Return, as JSON, the 20 most recent THeart rows
    ({'pk', 'FechaTiempo', 'Beat'}); on error the body is 'fail' + message.
    """
    pg = request.GET.get('pg')  # unused, kept for parity
    # NOTE(review): request.is_ajax is a method object, always truthy here.
    if request.is_ajax:
        try:
            # NOTE(review): .reverse() without an explicit order_by relies on
            # the model's default ordering -- confirm THeart.Meta.ordering.
            consulta = list(
                THeart.objects.values('pk', 'FechaTiempo', 'Beat')
                .reverse()[:20])
            lista = []
            for obj in consulta:
                dato = {}
                dato['pk'] = obj['pk']
                dato['FechaTiempo'] = obj['FechaTiempo']
                dato['Beat'] = int(obj['Beat'])
                lista.append(dato)
            # myconverter stringifies the datetime values for JSON
            data = json.dumps(lista, default=myconverter)
        except Exception:  # was a bare except; keep the best-effort reply
            data = 'fail' + str(sys.exc_info()[1])
    else:
        data = 'fail'
    mimetype = "application/json"
    return HttpResponse(data, mimetype)
@login_required
def read_data_ML(request):
    """Return up to the 100 most recent THeart rows as a list of dicts
    ({'pk', 'FechaTiempo', 'Beat'}); an empty list on error.

    Fixes two defects in the original: the parameter was named ``self``
    while the body read an undefined global ``request`` (NameError), and
    ``consulta`` was unbound at ``return`` whenever the query raised
    (the 'fail' string built in the except branch was never used).
    """
    pg = request.GET.get('pg')  # unused, kept for parity with other views
    consulta = []
    # NOTE(review): request.is_ajax is a method object, always truthy here.
    if request.is_ajax:
        try:
            consulta = list(
                THeart.objects.values('pk', 'FechaTiempo', 'Beat')
                .reverse()[:100])
        except Exception:
            # Best effort: fall through with the empty list.
            consulta = []
    return consulta
'''
@login_required
def analizar_rf_train_view(request):
pg=request.GET.get('pg')
#pkGrifo=request.GET.get('pkGrifo')
#pkReporte=request.GET.get('pkReporte')
if request.is_ajax:
try:
consulta=list(THeart.objects.values('Beat').reverse()[:100])
#lista=[]
#for obj in consulta:
# dato={}
# dato['Beat']=int(obj['Beat'])
# lista.append(dato)
#print(dato)
#Lectura de la base de datos
#print lista
#df = pd.DataFrame(lista)
#print df
#Lectura de archivo csv
# Create a dataframe with the four feature variables
df = pd.read_csv("DadosG.csv")
# View the top 5 rows
#print df
# Create a new column that for each row, generates a random number between 0 and 1, and
# if that value is less than or equal to .75, then sets the value of that cell as True
# and false otherwise. This is a quick and dirty way of randomly assigning some rows to
# be used as the training data and some as the test data.
df['is_train'] = np.random.uniform(0, 1, len(df)) <= .75
# Create two new dataframes, one with the training rows, one with the test rows
train, test = df[df['is_train']==True], df[df['is_train']==False]
#print train[['BPM','Edad','Estatura','Sexo','Peso']].head()
#print test[['BPM','Edad','Estatura','Sexo','Peso']].head()
# Show the number of observations for the test and training dataframes
print('Number of observations in the training data:', len(train))
print('Number of observations in the test data:',len(test))
# Create a list of the feature column's names
# features = df.columns[:4]
# features = ['BPM','Edad','Estatura','Sexo','Peso']
# print features
# train['species'] contains the actual species names. Before we can use it,
# we need to convert each species name into a digit. So, in this case there
# are two states, which have been coded as 0, 1.
y = pd.factorize(train['Estado'])[0]
#print y
# Create a random forest Classifier. By convention, clf means 'Classifier'
clf = RandomForestClassifier(n_jobs=2, random_state=0)
# Train the Classifier to take the training features and learn how they relate
# to the training y (the species)
#print train[['BPM','Edad','Estatura','Sexo','Peso']]
clf.fit(train[['BPM','Edad','Estatura','Sexo','Peso']], y)
# Apply the Classifier we trained to the test data (which, remember, it has never seen before)
predict = clf.predict(test[['BPM','Edad','Estatura','Sexo','Peso']])
# View the predicted probabilities of the first 10 observations
predict_prob = clf.predict_proba(test[['BPM','Edad','Estatura','Sexo','Peso']])
neto = np.column_stack((predict,predict_prob ))
print neto
data= json.dumps(neto.tolist())
except:
data='fail'+ str(sys.exc_info()[1])
else:
data='fail'
mimetype="application/json"
return HttpResponse(data,mimetype)
@login_required
def analizar_rf_view(request):
pg=request.GET.get('pg')
#pkGrifo=request.GET.get('pkGrifo')
#pkReporte=request.GET.get('pkReporte')
n = 50 # Tamanio de la consulta del dataframe
if request.is_ajax:
try:
consulta=list(THeart.objects.values('Beat').reverse()[:n])
lista=[]
for obj in consulta:
dato={}
dato['BPM']=int(obj['Beat'])
lista.append(dato)
#print(dato)
#Lectura de la base de datos
df1 = pd.DataFrame(lista)
#edad = np.full((n, 1), 28, dtype=int)
df1['Edad'] = 28
df1['Estatura'] = 169
df1['Sexo'] = 1
df1['Peso'] = 75
print("Data Predecir:")
print df1
#print join
#Lectura de archivo csv
# Create a dataframe with the four feature variables
df = pd.read_csv("DadosG.csv")
# View the top 5 rows
print("Data Train:")
print df.head()
# Create a new column that for each row, generates a random number between 0 and 1, and
# if that value is less than or equal to .75, then sets the value of that cell as True
# and false otherwise. This is a quick and dirty way of randomly assigning some rows to
# be used as the training data and some as the test data.
# df['is_train'] = np.random.uniform(0, 1, len(df)) <= .75
# Create two new dataframes, one with the training rows, one with the test rows
# train, test = df[df['is_train']==True], df[df['is_train']==False]
#print train[['BPM','Edad','Estatura','Sexo','Peso']].head()
#print test[['BPM','Edad','Estatura','Sexo','Peso']].head()
# Show the number of observations for the test and training dataframes
# print('Number of observations in the training data:', len(train))
# print('Number of observations in the test data:',len(test))
# Create a list of the feature column's names
# features = df.columns[:4]
# features = ['BPM','Edad','Estatura','Sexo','Peso']
# print features
# train['species'] contains the actual species names. Before we can use it,
# we need to convert each species name into a digit. So, in this case there
# are two states, which have been coded as 0, 1.
# y = pd.factorize(train['Estado'])[0]
y = pd.factorize(df['Estado'])[0]
print("Estado Normal= 0 , Estado Arritmia = 1:")
print y
# Create a random forest Classifier. By convention, clf means 'Classifier'
clf = RandomForestClassifier(n_jobs=2, random_state=0)
# Train the Classifier to take the training features and learn how they relate
# to the training y (the species)
#print train[['BPM','Edad','Estatura','Sexo','Peso']]
clf.fit(df[['BPM','Edad','Estatura','Sexo','Peso']], y)
#clf.fit(train[['BPM','Edad','Estatura','Sexo','Peso']], y)
# Apply the Classifier we trained to the test data (which, remember, it has never seen before)
#predict = clf.predict(test[['BPM','Edad','Estatura','Sexo','Peso']])
predict = clf.predict(df1[['BPM','Edad','Estatura','Sexo','Peso']])
print("predict simple:")
print predict
# View the predicted probabilities of the first 10 observations
predict_prob = clf.predict_proba(df1[['BPM','Edad','Estatura','Sexo','Peso']])
print("predict_prob:")
print predict_prob
neto = np.column_stack((predict,predict_prob))
print("neto join:")
print neto
data= json.dumps(neto.tolist())
except:
data='fail'+ str(sys.exc_info()[1])
else:
data='fail'
mimetype="application/json"
return HttpResponse(data,mimetype)
'''
| 34.807198
| 330
| 0.698948
| 6,973
| 54,160
| 5.332568
| 0.070701
| 0.018718
| 0.029367
| 0.01775
| 0.799995
| 0.781384
| 0.765168
| 0.75277
| 0.734187
| 0.714097
| 0
| 0.007723
| 0.1274
| 54,160
| 1,556
| 331
| 34.807198
| 0.779073
| 0.044701
| 0
| 0.751553
| 0
| 0
| 0.166795
| 0.002181
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.013975
| null | null | 0.007764
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4b10d7bcc06cee2a8edcf285c944b01aa7888f50
| 6,536
|
py
|
Python
|
tests/test_message_api.py
|
nelfin/pylint-protobuf
|
dbdfaecfbf248ccc75f6cdef4b29e4fb6a21e70e
|
[
"MIT"
] | 25
|
2018-09-21T13:21:25.000Z
|
2021-12-30T06:00:57.000Z
|
tests/test_message_api.py
|
nelfin/pylint-protobuf
|
dbdfaecfbf248ccc75f6cdef4b29e4fb6a21e70e
|
[
"MIT"
] | 48
|
2019-01-26T10:10:43.000Z
|
2021-07-27T02:23:19.000Z
|
tests/test_message_api.py
|
nelfin/pylint-protobuf
|
dbdfaecfbf248ccc75f6cdef4b29e4fb6a21e70e
|
[
"MIT"
] | 12
|
2018-09-27T09:19:58.000Z
|
2021-02-12T17:17:07.000Z
|
import pytest
import pylint_protobuf
from conftest import CheckerTestCase
@pytest.fixture
def proto2_example(proto_builder):
return proto_builder("""
message Example {
message Inner {
required int32 sub_count = 1;
}
required int32 count = 1;
required Inner inner = 2;
optional string tag = 3;
repeated int32 friends = 4;
}
""")
class TestMessageApiProto2(CheckerTestCase):
CHECKER_CLASS = pylint_protobuf.ProtobufDescriptorChecker
@pytest.mark.parametrize('field,field_to_check,message_id,message_args', [
('example', 'count', None, None), # unset required
('example', 'tag', None, None), # unset optional
('example', 'inner', None, None), # unset sub_message
('example', 'friends', 'protobuf-no-repeated-membership', None), # repeated field
('example', 'should_warn', 'protobuf-undefined-attribute', ('should_warn', 'Example')), # no such field
('example.inner', 'sub_count', None, None), # unset sub_message required field
('example.inner', 'should_warn', 'protobuf-undefined-attribute', ('should_warn', 'Inner')), # no such field
])
def test_hasfield(self, field, field_to_check, message_id, message_args, proto2_example):
node = self.extract_node("""
from {} import Example
example = Example()
{}.HasField({!r})
""".format(proto2_example, field, field_to_check))
if message_id is None:
self.assert_no_messages(node)
else:
msg = self.make_message(message_id, node, message_args)
self.assert_adds_messages(node, msg)
@pytest.mark.parametrize('field,field_to_check,message_id,message_args', [
('example', 'count', None, None), # unset required
('example', 'tag', None, None), # unset optional
('example', 'inner', None, None), # unset sub_message
('example', 'friends', 'protobuf-no-repeated-membership', None), # repeated field
('example', 'should_warn', 'protobuf-undefined-attribute', ('should_warn', 'Example')), # no such field
('example.inner', 'sub_count', None, None), # unset sub_message required field
('example.inner', 'should_warn', 'protobuf-undefined-attribute', ('should_warn', 'Inner')), # no such field
])
def test_clearfield(self, field, field_to_check, message_id, message_args, proto2_example):
node = self.extract_node("""
from {} import Example
example = Example()
{}.ClearField({!r})
""".format(proto2_example, field, field_to_check))
if message_id is None:
self.assert_no_messages(node)
else:
msg = self.make_message(message_id, node, message_args)
self.assert_adds_messages(node, msg)
@pytest.fixture
def proto3_example(proto_builder, request):
name = request.node.name.translate({ord(c): ord('_') for c in '/.:[]-'})
# TODO: rework proto_builder to just take a syntax argument?
preamble = 'syntax = "proto3";\npackage {};\n'.format(name)
return proto_builder("""
message Example {
message Inner {
int32 sub_count = 1;
}
int32 count = 1;
Inner inner = 2;
// optional string tag = 3;
// optional needs --experimental_allow_proto3_optional, equivalent to the following:
oneof _tag {
string tag = 3;
}
repeated int32 friends = 4;
}
""", preamble=preamble)
class TestMessageApiProto3(CheckerTestCase):
CHECKER_CLASS = pylint_protobuf.ProtobufDescriptorChecker
@pytest.mark.parametrize('field,field_to_check,message_id,message_args', [
('example', 'count', 'protobuf-no-proto3-membership', ('count',)), # unset required
('example', 'tag', None, None), # unset optional
('example', 'inner', None, None), # unset sub_message
('example', 'friends', 'protobuf-no-repeated-membership', None), # repeated field
('example', 'should_warn', 'protobuf-undefined-attribute', ('should_warn', 'Example')), # no such field
('example.inner', 'sub_count', 'protobuf-no-proto3-membership', ('sub_count',)), # unset sub_message required field
('example.inner', 'should_warn', 'protobuf-undefined-attribute', ('should_warn', 'Inner')), # no such field
])
def test_hasfield(self, field, field_to_check, message_id, message_args, proto3_example):
# Traceback (most recent call last):
# File "proto3_hasfield.py", line 3, in <module>
# assert e.HasField('count') # will raise ValueError
# ValueError: Can't test non-optional, non-submessage field "Example.value" for presence in proto3.
node = self.extract_node("""
from {} import Example
example = Example()
{}.HasField({!r})
""".format(proto3_example, field, field_to_check))
if message_id is None:
self.assert_no_messages(node)
else:
msg = self.make_message(message_id, node, message_args)
self.assert_adds_messages(node, msg)
@pytest.mark.parametrize('field,field_to_check,message_id,message_args', [
('example', 'count', 'protobuf-no-proto3-membership', ('count',)), # unset required
('example', 'tag', None, None), # unset optional
('example', 'inner', None, None), # unset sub_message
('example', 'friends', 'protobuf-no-repeated-membership', None), # repeated field
('example', 'should_warn', 'protobuf-undefined-attribute', ('should_warn', 'Example')), # no such field
('example.inner', 'sub_count', 'protobuf-no-proto3-membership', ('sub_count',)), # unset sub_message required field
('example.inner', 'should_warn', 'protobuf-undefined-attribute', ('should_warn', 'Inner')), # no such field
])
def test_clearfield(self, field, field_to_check, message_id, message_args, proto3_example):
node = self.extract_node("""
from {} import Example
example = Example()
{}.ClearField({!r})
""".format(proto3_example, field, field_to_check))
if message_id is None:
self.assert_no_messages(node)
else:
msg = self.make_message(message_id, node, message_args)
self.assert_adds_messages(node, msg)
| 47.708029
| 124
| 0.617503
| 719
| 6,536
| 5.421419
| 0.15299
| 0.036942
| 0.036942
| 0.052335
| 0.82863
| 0.82863
| 0.82863
| 0.777835
| 0.777835
| 0.777835
| 0
| 0.00897
| 0.249541
| 6,536
| 136
| 125
| 48.058824
| 0.785729
| 0.121022
| 0
| 0.739496
| 0
| 0
| 0.430823
| 0.118564
| 0
| 0
| 0
| 0.007353
| 0.067227
| 1
| 0.05042
| false
| 0
| 0.058824
| 0.008403
| 0.159664
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4b260d90887db9f32400e48b7cb524813e0f2cfc
| 4,665
|
py
|
Python
|
tests/utils/test_qos.py
|
ovh/depc
|
ce428b6ba790ee4a2e7150b4cb68fdcbfdfae2f4
|
[
"BSD-3-Clause"
] | 77
|
2019-01-30T10:12:36.000Z
|
2021-10-19T16:25:53.000Z
|
tests/utils/test_qos.py
|
ovh/depc
|
ce428b6ba790ee4a2e7150b4cb68fdcbfdfae2f4
|
[
"BSD-3-Clause"
] | 13
|
2019-02-20T16:57:57.000Z
|
2022-03-01T23:10:26.000Z
|
tests/utils/test_qos.py
|
ovh/depc
|
ce428b6ba790ee4a2e7150b4cb68fdcbfdfae2f4
|
[
"BSD-3-Clause"
] | 10
|
2019-01-30T13:30:39.000Z
|
2021-08-02T05:55:18.000Z
|
import pandas as pd
from deepdiff import DeepDiff
from depc.utils.qos import OperationTypes
from depc.utils.qos import _compute_qos
DEFAULT_ARGS = {"agg_op": OperationTypes.AND, "auto_fill": True, "float_decimal": 3}
def test_compute_qos_empty():
expected = {
"qos": None,
"bools_dps": {},
"periods": {"ok": 0, "ko": 0},
}
actual = _compute_qos([], start=0, end=1, **DEFAULT_ARGS)
assert DeepDiff(expected, actual, ignore_order=True) == {}
def test_compute_qos_one_good_datapoint():
data = pd.Series({1595980800: True})
expected = {
"bools_dps": {1595980800: True, 1596067199: True},
"periods": {"ko": 0, "ok": 86399},
"qos": 100.0,
}
actual = _compute_qos([data], start=1595980800, end=1596067199, **DEFAULT_ARGS)
assert DeepDiff(expected, actual, ignore_order=True) == {}
def test_compute_qos_one_bad_datapoint():
data = pd.Series({1595980800: False})
expected = {
"bools_dps": {1595980800: False, 1596067199: False},
"periods": {"ko": 86399, "ok": 0},
"qos": 0.0,
}
actual = _compute_qos([data], start=1595980800, end=1596067199, **DEFAULT_ARGS)
assert DeepDiff(expected, actual, ignore_order=True) == {}
def test_compute_qos_one_good_datapoint_no_autofill():
data = pd.Series({1595980800: True})
expected = {
"bools_dps": {1595980800: True},
"periods": {"ko": 0, "ok": 1},
"qos": 100.0,
}
actual = _compute_qos(
[data],
start=1595980800,
end=1596067199,
agg_op=OperationTypes.AND,
auto_fill=False,
float_decimal=3,
)
assert DeepDiff(expected, actual, ignore_order=True) == {}
def test_compute_qos_one_bad_datapoint_no_autofill():
data = pd.Series({1595980800: False})
expected = {
"bools_dps": {1595980800: False},
"periods": {"ko": 1, "ok": 0},
"qos": 0.0,
}
actual = _compute_qos(
[data],
start=1595980800,
end=1596067199,
agg_op=OperationTypes.AND,
auto_fill=False,
float_decimal=3,
)
assert DeepDiff(expected, actual, ignore_order=True) == {}
def test_compute_qos_good_values():
data = pd.Series({1595980800: True, 1595994060: True})
expected = {
"bools_dps": {1595980800: True, 1596067199: True},
"periods": {"ko": 0, "ok": 86399},
"qos": 100.0,
}
actual = _compute_qos([data], start=1595980800, end=1596067199, **DEFAULT_ARGS)
assert DeepDiff(expected, actual, ignore_order=True) == {}
def test_compute_qos_one_minute_downtime():
data = pd.Series({1595980800: True, 1595994000: False, 1595994060: True})
expected = {
"bools_dps": {
1595980800: True,
1595994000: False,
1595994060: True,
1596067199: True,
},
"periods": {"ko": 60, "ok": 86339},
"qos": 99.931,
}
actual = _compute_qos([data], start=1595980800, end=1596067199, **DEFAULT_ARGS)
assert DeepDiff(expected, actual, ignore_order=True) == {}
def test_compute_qos_two_series():
s1 = pd.Series({1595980800: True, 1595994000: False, 1595994060: True})
s2 = pd.Series({1595980800: True, 1595994001: True, 1595994060: True})
expected = {
"bools_dps": {
1595980800: True,
1595994000: False,
1595994060: True,
1596067199: True,
},
"periods": {"ko": 60, "ok": 86339},
"qos": 99.931,
}
actual = _compute_qos([s1, s2], start=1595980800, end=1596067199, **DEFAULT_ARGS)
assert DeepDiff(expected, actual, ignore_order=True) == {}
def test_compute_qos_two_series_bad_values():
s1 = pd.Series({1595980800: True, 1595994000: False, 1595994060: True})
s2 = pd.Series({1595980800: False, 1595994000: False, 1595994060: False})
expected = {
"bools_dps": {1595980800: False, 1596067199: False},
"periods": {"ko": 86399, "ok": 0},
"qos": 0.0,
}
actual = _compute_qos([s1, s2], start=1595980800, end=1596067199, **DEFAULT_ARGS)
assert DeepDiff(expected, actual, ignore_order=True) == {}
def test_compute_qos_two_series_good_values():
s1 = pd.Series({1595980800: True, 1595994000: True, 1595994060: True})
s2 = pd.Series({1595980800: True, 1595994000: True, 1595994060: True})
expected = {
"bools_dps": {1595980800: True, 1596067199: True},
"periods": {"ko": 0, "ok": 86399},
"qos": 100.0,
}
actual = _compute_qos([s1, s2], start=1595980800, end=1596067199, **DEFAULT_ARGS)
assert DeepDiff(expected, actual, ignore_order=True) == {}
| 32.395833
| 85
| 0.615863
| 532
| 4,665
| 5.18985
| 0.12406
| 0.076059
| 0.078233
| 0.061572
| 0.905831
| 0.876132
| 0.865266
| 0.850779
| 0.80623
| 0.80623
| 0
| 0.210334
| 0.236656
| 4,665
| 143
| 86
| 32.622378
| 0.56501
| 0
| 0
| 0.644628
| 0
| 0
| 0.055305
| 0
| 0
| 0
| 0
| 0
| 0.082645
| 1
| 0.082645
| false
| 0
| 0.033058
| 0
| 0.115702
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4b282ec6f60127cf8b82a36d0c7739879be7776c
| 14,949
|
py
|
Python
|
azure-graphrbac/azure/graphrbac/operations/application_operations_operations.py
|
HydAu/AzureSDKForPython
|
5cbe34e9e0b8ea1faacc9f205633ccc0b885c0f3
|
[
"Apache-2.0"
] | null | null | null |
azure-graphrbac/azure/graphrbac/operations/application_operations_operations.py
|
HydAu/AzureSDKForPython
|
5cbe34e9e0b8ea1faacc9f205633ccc0b885c0f3
|
[
"Apache-2.0"
] | null | null | null |
azure-graphrbac/azure/graphrbac/operations/application_operations_operations.py
|
HydAu/AzureSDKForPython
|
5cbe34e9e0b8ea1faacc9f205633ccc0b885c0f3
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft and contributors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
import uuid
from .. import models
class ApplicationOperationsOperations(object):
    """ApplicationOperationsOperations operations.

    AutoRest-generated CRUD client for AAD application objects; hand edits
    will be lost on regeneration (see the file header).

    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """
    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self.config = config
    def create(
            self, parameters, custom_headers=None, raw=False, **operation_config):
        """
        Create a new application. Reference:
        http://msdn.microsoft.com/en-us/library/azure/hh974476.aspx
        :param parameters: Parameters to create an application.
        :type parameters: :class:`ApplicationCreateParameters
         <azure.graphrbac.models.ApplicationCreateParameters>`
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: :class:`Application <azure.graphrbac.models.Application>`
        :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
         if raw=true
        """
        # Construct URL
        url = '/{tenantID}/applications'
        path_format_arguments = {
            'tenantID': self._serialize.url("self.config.tenant_id", self.config.tenant_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.config.api_version", self.config.api_version, 'str')
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            # uuid1 provides a unique client-side id for request correlation
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        # Construct body
        body_content = self._serialize.body(parameters, 'ApplicationCreateParameters')
        # Construct and send request
        request = self._client.post(url, query_parameters)
        response = self._client.send(
            request, header_parameters, body_content, **operation_config)
        # 201 Created is the only success status for this operation
        if response.status_code not in [201]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp
        deserialized = None
        if response.status_code == 201:
            deserialized = self._deserialize('Application', response)
        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response
        return deserialized
    def list(
            self, filter=None, custom_headers=None, raw=False, **operation_config):
        """
        Lists applications by filter parameters. Reference:
        http://msdn.microsoft.com/en-us/library/azure/hh974476.aspx
        :param filter: The filters to apply on the operation
        :type filter: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: :class:`ApplicationPaged
         <azure.graphrbac.models.ApplicationPaged>`
        """
        # Paging callback: fetches either the first page (builds the URL and
        # query) or a continuation page (uses the server-provided next_link).
        def internal_paging(next_link=None, raw=False):
            if not next_link:
                # Construct URL
                url = '/{tenantID}/applications'
                path_format_arguments = {
                    'tenantID': self._serialize.url("self.config.tenant_id", self.config.tenant_id, 'str')
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}
                if filter is not None:
                    query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
                query_parameters['api-version'] = self._serialize.query("self.config.api_version", self.config.api_version, 'str')
            else:
                # Continuation links already embed the query string
                url = next_link
                query_parameters = {}
            # Construct headers
            header_parameters = {}
            header_parameters['Content-Type'] = 'application/json; charset=utf-8'
            if self.config.generate_client_request_id:
                header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
            if custom_headers:
                header_parameters.update(custom_headers)
            if self.config.accept_language is not None:
                header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
            # Construct and send request
            request = self._client.get(url, query_parameters)
            response = self._client.send(
                request, header_parameters, **operation_config)
            if response.status_code not in [200]:
                exp = CloudError(response)
                exp.request_id = response.headers.get('x-ms-request-id')
                raise exp
            return response
        # Deserialize response
        deserialized = models.ApplicationPaged(internal_paging, self._deserialize.dependencies)
        if raw:
            header_dict = {}
            client_raw_response = models.ApplicationPaged(internal_paging, self._deserialize.dependencies, header_dict)
            return client_raw_response
        return deserialized
    def delete(
            self, application_object_id, custom_headers=None, raw=False, **operation_config):
        """
        Delete an application. Reference:
        http://msdn.microsoft.com/en-us/library/azure/hh974476.aspx
        :param application_object_id: Application object id
        :type application_object_id: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: None
        :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
         if raw=true
        """
        # Construct URL
        url = '/{tenantID}/applications/{applicationObjectId}'
        path_format_arguments = {
            'applicationObjectId': self._serialize.url("application_object_id", application_object_id, 'str', skip_quote=True),
            'tenantID': self._serialize.url("self.config.tenant_id", self.config.tenant_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.config.api_version", self.config.api_version, 'str')
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        # Construct and send request
        request = self._client.delete(url, query_parameters)
        response = self._client.send(request, header_parameters, **operation_config)
        # 204 No Content is the only success status; nothing to deserialize
        if response.status_code not in [204]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp
        if raw:
            client_raw_response = ClientRawResponse(None, response)
            return client_raw_response
    def get(
            self, application_object_id, custom_headers=None, raw=False, **operation_config):
        """
        Get an application by object Id. Reference:
        http://msdn.microsoft.com/en-us/library/azure/hh974476.aspx
        :param application_object_id: Application object id
        :type application_object_id: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: :class:`Application <azure.graphrbac.models.Application>`
        :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
         if raw=true
        """
        # Construct URL
        url = '/{tenantID}/applications/{applicationObjectId}'
        path_format_arguments = {
            'applicationObjectId': self._serialize.url("application_object_id", application_object_id, 'str', skip_quote=True),
            'tenantID': self._serialize.url("self.config.tenant_id", self.config.tenant_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.config.api_version", self.config.api_version, 'str')
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        # Construct and send request
        request = self._client.get(url, query_parameters)
        response = self._client.send(request, header_parameters, **operation_config)
        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('Application', response)
        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response
        return deserialized
    def patch(
            self, application_object_id, parameters, custom_headers=None, raw=False, **operation_config):
        """
        Update existing application. Reference:
        http://msdn.microsoft.com/en-us/library/azure/hh974476.aspx
        :param application_object_id: Application object id
        :type application_object_id: str
        :param parameters: Parameters to create an application.
        :type parameters: :class:`ApplicationCreateParameters
         <azure.graphrbac.models.ApplicationCreateParameters>`
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: None
        :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
         if raw=true
        """
        # Construct URL
        url = '/{tenantID}/applications/{applicationObjectId}'
        path_format_arguments = {
            'applicationObjectId': self._serialize.url("application_object_id", application_object_id, 'str', skip_quote=True),
            'tenantID': self._serialize.url("self.config.tenant_id", self.config.tenant_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.config.api_version", self.config.api_version, 'str')
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        # Construct body
        body_content = self._serialize.body(parameters, 'ApplicationCreateParameters')
        # Construct and send request
        request = self._client.patch(url, query_parameters)
        response = self._client.send(
            request, header_parameters, body_content, **operation_config)
        # 204 No Content is the only success status; nothing to deserialize
        if response.status_code not in [204]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp
        if raw:
            client_raw_response = ClientRawResponse(None, response)
            return client_raw_response
| 43.205202
| 144
| 0.656097
| 1,600
| 14,949
| 5.95375
| 0.125
| 0.04304
| 0.035902
| 0.037791
| 0.832983
| 0.832983
| 0.832983
| 0.810938
| 0.800441
| 0.800441
| 0
| 0.005821
| 0.241555
| 14,949
| 345
| 145
| 43.330435
| 0.834362
| 0.304301
| 0
| 0.760479
| 0
| 0
| 0.141202
| 0.078753
| 0
| 0
| 0
| 0
| 0
| 1
| 0.041916
| false
| 0
| 0.023952
| 0
| 0.125749
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8a25bd6b5b76095c7d7bf6453e1ae2d75c29fb4c
| 5,212
|
py
|
Python
|
tests/bundles/security/views/security_controller/test_change_password.py
|
achiang/flask-unchained
|
12788a6e618904a25ff2b571eb05ff1dc8f1840f
|
[
"MIT"
] | 69
|
2018-10-10T01:59:11.000Z
|
2022-03-29T17:29:30.000Z
|
tests/bundles/security/views/security_controller/test_change_password.py
|
achiang/flask-unchained
|
12788a6e618904a25ff2b571eb05ff1dc8f1840f
|
[
"MIT"
] | 18
|
2018-11-17T12:42:02.000Z
|
2021-05-22T18:45:27.000Z
|
tests/bundles/security/views/security_controller/test_change_password.py
|
achiang/flask-unchained
|
12788a6e618904a25ff2b571eb05ff1dc8f1840f
|
[
"MIT"
] | 7
|
2018-10-12T16:20:25.000Z
|
2021-10-06T12:18:21.000Z
|
import pytest
from flask_unchained.bundles.security import current_user
@pytest.mark.options(SECURITY_CHANGEABLE=True)
@pytest.mark.usefixtures('user')
class TestHtmlChangePassword:
    """HTML (browser form) flow for the change-password endpoint."""

    def test_auth_required(self, client):
        resp = client.post('security_controller.change_password')
        assert resp.status_code == 401

    def test_fields_required(self, client, templates):
        client.login_user()
        resp = client.post('security_controller.change_password')
        assert resp.status_code == 200
        assert templates[0].template.name == 'security/change_password.html'
        assert resp.html.count('Password is required.') == 3, resp.html

    def test_min_length(self, client, templates):
        client.login_user()
        form = {'password': 'password',
                'new_password': 'fail',
                'new_password_confirm': 'fail'}
        resp = client.post('security_controller.change_password', data=form)
        assert resp.status_code == 200
        assert templates[0].template.name == 'security/change_password.html'
        assert 'Password must be at least 8 characters long.' in resp.html

    def test_new_passwords_match(self, client, templates):
        client.login_user()
        form = {'password': 'password',
                'new_password': 'long enough',
                'new_password_confirm': 'but no match'}
        resp = client.post('security_controller.change_password', data=form)
        assert resp.status_code == 200
        assert templates[0].template.name == 'security/change_password.html'
        assert 'Passwords do not match.' in resp.html, resp.html

    def test_new_same_as_the_old(self, client, templates):
        client.login_user()
        form = {'password': 'password',
                'new_password': 'password',
                'new_password_confirm': 'password'}
        resp = client.post('security_controller.change_password', data=form)
        assert resp.status_code == 200
        assert templates[0].template.name == 'security/change_password.html'
        assert ('Your new password must be different than your previous password.'
                in resp.html)

    def test_valid_new_password(self, client, user):
        client.login_user()
        form = {'password': 'password',
                'new_password': 'new password',
                'new_password_confirm': 'new password'}
        resp = client.post('security_controller.change_password', data=form)
        # successful change redirects home
        assert resp.status_code == 302
        assert resp.path == '/'
        client.logout()
        # the new credentials must now authenticate
        client.login_with_creds(user.email, 'new password')
        assert current_user == user
@pytest.mark.options(SECURITY_CHANGEABLE=True)
@pytest.mark.usefixtures('user')
class TestApiChangePassword:
    """JSON API flow for the change-password endpoint."""

    def test_auth_required(self, api_client):
        resp = api_client.post('security_api.change_password')
        assert resp.status_code == 401

    def test_fields_required(self, api_client):
        api_client.login_user()
        resp = api_client.post('security_api.change_password')
        assert resp.status_code == 400, resp.json
        # every field should be reported as missing
        for field in ('password', 'new_password', 'new_password_confirm'):
            assert field in resp.errors

    def test_min_length(self, api_client):
        api_client.login_user()
        resp = api_client.post('security_api.change_password',
                               data={'password': 'password',
                                     'new_password': 'fail',
                                     'new_password_confirm': 'fail'})
        msg = 'Password must be at least 8 characters long.'
        assert msg in resp.errors['new_password']

    def test_new_passwords_match(self, api_client):
        api_client.login_user()
        resp = api_client.post('security_api.change_password',
                               data={'password': 'password',
                                     'new_password': 'long enough',
                                     'new_password_confirm': 'but no match'})
        assert 'new_password_confirm' in resp.errors
        assert 'Passwords do not match.' in resp.errors['new_password_confirm']

    def test_new_same_as_the_old(self, api_client):
        api_client.login_user()
        resp = api_client.post('security_api.change_password',
                               data={'password': 'password',
                                     'new_password': 'password',
                                     'new_password_confirm': 'password'})
        msg = 'Your new password must be different than your previous password.'
        assert msg in resp.errors['new_password']

    def test_valid_new_password(self, api_client, user):
        api_client.login_user()
        resp = api_client.post('security_api.change_password',
                               data={'password': 'password',
                                     'new_password': 'new password',
                                     'new_password_confirm': 'new password'})
        assert resp.status_code == 200
        # the API returns a fresh auth token on success
        assert 'token' in resp.json
        api_client.logout()
        api_client.login_with_creds(user.email, 'new password')
        assert current_user == user
| 44.169492
| 91
| 0.599962
| 588
| 5,212
| 5.07483
| 0.146259
| 0.117962
| 0.089142
| 0.053619
| 0.912869
| 0.874665
| 0.845174
| 0.790885
| 0.752011
| 0.727882
| 0
| 0.009418
| 0.307368
| 5,212
| 117
| 92
| 44.547009
| 0.817175
| 0
| 0
| 0.67
| 0
| 0
| 0.215848
| 0.094781
| 0
| 0
| 0
| 0
| 0.28
| 1
| 0.12
| false
| 0.61
| 0.02
| 0
| 0.16
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
8a5bceafecadb5bcf946234a0f494423480ac9ed
| 193
|
py
|
Python
|
gammapy/shower/tests/test_hillas.py
|
joleroi/gammapy
|
c4e0c4bd74c79d30e0837559d18b7a1a269f70d9
|
[
"BSD-3-Clause"
] | null | null | null |
gammapy/shower/tests/test_hillas.py
|
joleroi/gammapy
|
c4e0c4bd74c79d30e0837559d18b7a1a269f70d9
|
[
"BSD-3-Clause"
] | null | null | null |
gammapy/shower/tests/test_hillas.py
|
joleroi/gammapy
|
c4e0c4bd74c79d30e0837559d18b7a1a269f70d9
|
[
"BSD-3-Clause"
] | null | null | null |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import print_function, division
from ...shower import hillas_parameters
def test_hillas_parameters():
    """Placeholder test.

    No assertions yet; the module-level import of ``hillas_parameters``
    is all this currently exercises. TODO: add real checks.
    """
    pass
| 24.125
| 63
| 0.792746
| 27
| 193
| 5.37037
| 0.814815
| 0.22069
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006098
| 0.150259
| 193
| 7
| 64
| 27.571429
| 0.878049
| 0.316062
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0.25
| 0.5
| 0
| 0.75
| 0.25
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 8
|
8ad573e5deccd20fe3864f8c71020edc13a192b6
| 64,127
|
py
|
Python
|
nicos/devices/tas/spacegroups.py
|
jkrueger1/nicos
|
5f4ce66c312dedd78995f9d91e8a6e3c891b262b
|
[
"CC-BY-3.0",
"Apache-2.0",
"CC-BY-4.0"
] | null | null | null |
nicos/devices/tas/spacegroups.py
|
jkrueger1/nicos
|
5f4ce66c312dedd78995f9d91e8a6e3c891b262b
|
[
"CC-BY-3.0",
"Apache-2.0",
"CC-BY-4.0"
] | null | null | null |
nicos/devices/tas/spacegroups.py
|
jkrueger1/nicos
|
5f4ce66c312dedd78995f9d91e8a6e3c891b262b
|
[
"CC-BY-3.0",
"Apache-2.0",
"CC-BY-4.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# *****************************************************************************
# NICOS, the Networked Instrument Control System of the MLZ
# Copyright (c) 2009-2022 by the NICOS contributors (see AUTHORS)
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Module authors:
# Georg Brandl <g.brandl@fz-juelich.de>
#
# *****************************************************************************
"""
Crystallographic space-group symmetry information.
Extracted from PowderCell's PCWSPGR.DAT file.
"""
from nicos.core import NicosError
def get_spacegroup(sg):
    """Look up space-group data by number, ``(number, setting)`` tuple, or
    Hermann-Mauguin symbol string.
    """
    if isinstance(sg, str):
        # spaces in the H-M symbol are insignificant for the lookup
        looked_up = sg_by_hm.get(sg.replace(' ', ''))
        if looked_up is None:
            raise NicosError('space group with H-M symbol %r not found' % sg)
        key = looked_up
    elif isinstance(sg, int):
        # a bare number means the first setting
        key = (sg, 1)
    else:
        key = sg
    try:
        return sg_by_num[key]
    except KeyError:
        raise NicosError(
            'space group %r not recognized (valid values are: num, (num, '
            'setting) or "HMsym")' % (key,)) from None
# Reflection-condition predicates, indexed by the PowderCell condition number.
# Each entry maps (h, k, l) -> bool; entry 0 means "no condition" (always
# allowed). The table replaces a 32-branch if/elif chain with a dispatch.
_REFCOND_CHECKS = (
    lambda h, k, l: True,                                            # 0
    lambda h, k, l: h % 2 == 0,                                      # 1
    lambda h, k, l: k % 2 == 0,                                      # 2
    lambda h, k, l: l % 2 == 0,                                      # 3
    lambda h, k, l: (k + l) % 2 == 0,                                # 4
    lambda h, k, l: (h + l) % 2 == 0,                                # 5
    lambda h, k, l: (h + k) % 2 == 0,                                # 6
    lambda h, k, l: h % 2 == k % 2 == l % 2,                         # 7
    lambda h, k, l: (k + l) % 4 == 0,                                # 8
    lambda h, k, l: (h + l) % 4 == 0,                                # 9
    lambda h, k, l: (h + k) % 4 == 0,                                # 10
    lambda h, k, l: (2*h + l) % 2 == 0,                              # 11
    lambda h, k, l: (2*h + l) % 4 == 0,                              # 12
    lambda h, k, l: (h + k + l) % 2 == 0,                            # 13
    lambda h, k, l: (-h + k + l) % 3 == 0,                           # 14
    lambda h, k, l: (h - k + l) % 3 == 0,                            # 15
    lambda h, k, l: h % 4 == 0,                                      # 16
    lambda h, k, l: k % 4 == 0,                                      # 17
    lambda h, k, l: l % 3 == 0,                                      # 18
    lambda h, k, l: l % 4 == 0,                                      # 19
    lambda h, k, l: l % 6 == 0,                                      # 20
    lambda h, k, l: abs(h) >= abs(k) >= abs(l),                      # 21
    lambda h, k, l: (2*h + k) % 2 == 0,                              # 22
    lambda h, k, l: (2*h + k) % 4 == 0,                              # 23
    lambda h, k, l: (h + 2*k) % 2 == 0,                              # 24
    lambda h, k, l: (h + 2*k) % 4 == 0,                              # 25
    lambda h, k, l: h % 2 == 0 and k % 2 == 0,                       # 26
    lambda h, k, l: k % 2 == 0 and l % 2 == 0,                       # 27
    lambda h, k, l: h % 2 == 0 and l % 2 == 0,                       # 28
    lambda h, k, l: (k + l) % 4 == 0 and k % 2 == 0 and l % 2 == 0,  # 29
    lambda h, k, l: (h + l) % 4 == 0 and h % 2 == 0 and l % 2 == 0,  # 30
    lambda h, k, l: (h + k) % 4 == 0 and h % 2 == 0 and k % 2 == 0,  # 31
)
def check_refcond(n, h, k, l):
    """Check reflection condition *n* against h, k, l.

    The condition number n is the same as in the PowderCell space-group file.

    :param n: condition number, 0..31 (AssertionError otherwise, as before)
    :param h, k, l: integer Miller indices
    :returns: True if the reflection is allowed under condition *n*
    """
    # Preserve the original behavior: an out-of-range n is a programming
    # error and triggers an AssertionError.
    assert 0 <= n < len(_REFCOND_CHECKS), 'invalid condition number'
    return _REFCOND_CHECKS[n](h, k, l)
def can_reflect(sg, h, k, l):
    """Return whether reflection (h, k, l) is allowed.

    *sg* is a 14-entry list of reflection-condition numbers (an entry of
    ``sg_by_num``); each entry is evaluated with :func:`check_refcond`.
    Every condition whose index pattern matches (h, k, l) must hold, so the
    result is the AND of all applicable checks.
    """
    h = int(h)
    k = int(k)
    l = int(l)
    c = sg
    # c is an array of 14 condition numbers for the following cases:
    # 0   1   2   3   4   5   6   7   8   9   10  11  12  13
    # 00L 0K0 0KL H00 H0L HK0 HKL 0KK HH0 HHL H0H HKK HKH HHH
    # (all applying cases must be evaluated)
    # this one always applies (the general HKL condition)
    can = check_refcond(c[6], h, k, l)
    # now check the others; `can` stays a bool since &= on bools is bool-and
    if h == 0:
        can &= check_refcond(c[2], h, k, l)
        if k == 0:
            can &= check_refcond(c[0], h, k, l)
        elif l == 0:
            can &= check_refcond(c[1], h, k, l)
        elif k == l:
            can &= check_refcond(c[7], h, k, l)
    if k == 0:
        can &= check_refcond(c[4], h, k, l)
        # 00L already checked above
        if l == 0:
            can &= check_refcond(c[3], h, k, l)
        elif h == l:
            can &= check_refcond(c[10], h, k, l)
    if l == 0:
        can &= check_refcond(c[5], h, k, l)
        if h == k:
            can &= check_refcond(c[8], h, k, l)
    if h == k:
        can &= check_refcond(c[9], h, k, l)
        if h == l:
            can &= check_refcond(c[13], h, k, l)
    if k == l:
        can &= check_refcond(c[11], h, k, l)
    if h == l:
        can &= check_refcond(c[12], h, k, l)
    return can
## output of the script "convert_pcwspgr.py" follows
sg_by_num = {
(1, 1) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(1, 2) : [0, 2, 2, 1, 1, 6, 6, 0, 0, 0, 0, 0, 0, 0],
(1, 3) : [3, 2, 4, 0, 3, 2, 4, 0, 0, 0, 0, 0, 0, 0],
(1, 4) : [3, 0, 3, 1, 5, 1, 5, 0, 0, 0, 0, 0, 0, 0],
(1, 5) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(1, 6) : [3, 2, 27, 1, 28, 26, 7, 0, 0, 0, 0, 0, 0, 0],
(2, 1) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(2, 2) : [0, 2, 2, 1, 1, 6, 6, 0, 0, 0, 0, 0, 0, 0],
(2, 3) : [3, 2, 4, 0, 3, 2, 4, 0, 0, 0, 0, 0, 0, 0],
(2, 4) : [3, 0, 3, 1, 5, 1, 5, 0, 0, 0, 0, 0, 0, 0],
(2, 5) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(2, 6) : [3, 2, 27, 1, 28, 26, 7, 0, 0, 0, 0, 0, 0, 0],
(3, 1) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(3, 2) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(3, 3) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(3, 4) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(3, 5) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(3, 6) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(3, 7) : [3, 2, 27, 1, 28, 26, 7, 0, 0, 0, 0, 0, 0, 0],
(4, 1) : [0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(4, 2) : [0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(4, 3) : [3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(4, 4) : [3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(4, 5) : [0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(4, 6) : [0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(5, 1) : [0, 2, 2, 1, 1, 6, 6, 0, 0, 0, 0, 0, 0, 0],
(5, 2) : [3, 2, 4, 0, 3, 2, 4, 0, 0, 0, 0, 0, 0, 0],
(5, 3) : [3, 2, 4, 0, 3, 2, 4, 0, 0, 0, 0, 0, 0, 0],
(5, 4) : [3, 0, 3, 1, 5, 1, 5, 0, 0, 0, 0, 0, 0, 0],
(5, 5) : [3, 0, 3, 1, 5, 1, 5, 0, 0, 0, 0, 0, 0, 0],
(5, 6) : [0, 2, 2, 1, 1, 6, 6, 0, 0, 0, 0, 0, 0, 0],
(5, 7) : [3, 2, 4, 0, 3, 2, 4, 0, 0, 0, 0, 0, 0, 0],
(5, 8) : [0, 2, 2, 1, 1, 6, 6, 0, 0, 0, 0, 0, 0, 0],
(5, 9) : [3, 0, 3, 1, 5, 1, 5, 0, 0, 0, 0, 0, 0, 0],
(5, 10) : [3, 2, 4, 0, 3, 2, 4, 0, 0, 0, 0, 0, 0, 0],
(5, 11) : [0, 2, 2, 1, 1, 6, 6, 0, 0, 0, 0, 0, 0, 0],
(5, 12) : [3, 0, 3, 1, 5, 1, 5, 0, 0, 0, 0, 0, 0, 0],
(5, 13) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(5, 14) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(5, 15) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(5, 16) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(5, 17) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(5, 18) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(6, 1) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(6, 2) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(6, 3) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(6, 4) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(6, 5) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(6, 6) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(7, 1) : [3, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(7, 2) : [0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(7, 3) : [0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
(7, 4) : [0, 2, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0],
(7, 5) : [0, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(7, 6) : [3, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(7, 7) : [3, 0, 0, 1, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(7, 8) : [3, 0, 0, 1, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(7, 9) : [0, 2, 0, 1, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(7, 10) : [0, 2, 0, 1, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(7, 11) : [3, 2, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(7, 12) : [3, 2, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(7, 13) : [0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(7, 14) : [3, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(7, 15) : [0, 2, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0],
(7, 16) : [0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
(7, 17) : [3, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(7, 18) : [0, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(8, 1) : [0, 2, 2, 1, 1, 6, 6, 0, 0, 0, 0, 0, 0, 0],
(8, 2) : [3, 2, 4, 0, 3, 2, 4, 0, 0, 0, 0, 0, 0, 0],
(8, 3) : [3, 2, 4, 0, 3, 2, 4, 0, 0, 0, 0, 0, 0, 0],
(8, 4) : [3, 0, 3, 1, 5, 1, 5, 0, 0, 0, 0, 0, 0, 0],
(8, 5) : [3, 0, 3, 1, 5, 1, 5, 0, 0, 0, 0, 0, 0, 0],
(8, 6) : [0, 2, 2, 1, 1, 6, 6, 0, 0, 0, 0, 0, 0, 0],
(8, 7) : [3, 2, 4, 0, 3, 2, 4, 0, 0, 0, 0, 0, 0, 0],
(8, 8) : [0, 2, 2, 1, 1, 6, 6, 0, 0, 0, 0, 0, 0, 0],
(8, 9) : [3, 0, 3, 1, 5, 1, 5, 0, 0, 0, 0, 0, 0, 0],
(8, 10) : [3, 2, 4, 0, 3, 2, 4, 0, 0, 0, 0, 0, 0, 0],
(8, 11) : [0, 2, 2, 1, 1, 6, 6, 0, 0, 0, 0, 0, 0, 0],
(8, 12) : [3, 0, 3, 1, 5, 1, 5, 0, 0, 0, 0, 0, 0, 0],
(8, 13) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(8, 14) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(8, 15) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(8, 16) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(8, 17) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(8, 18) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(9, 1) : [3, 2, 2, 1, 28, 6, 6, 0, 0, 0, 0, 0, 0, 0],
(9, 2) : [3, 2, 4, 1, 28, 2, 4, 0, 0, 0, 0, 0, 0, 0],
(9, 3) : [3, 2, 4, 1, 3, 26, 4, 0, 0, 0, 0, 0, 0, 0],
(9, 4) : [3, 2, 3, 1, 5, 26, 5, 0, 0, 0, 0, 0, 0, 0],
(9, 5) : [3, 2, 27, 1, 5, 1, 5, 0, 0, 0, 0, 0, 0, 0],
(9, 6) : [3, 2, 27, 1, 1, 6, 6, 0, 0, 0, 0, 0, 0, 0],
(9, 7) : [3, 2, 4, 1, 28, 2, 4, 0, 0, 0, 0, 0, 0, 0],
(9, 8) : [3, 2, 2, 1, 28, 6, 6, 0, 0, 0, 0, 0, 0, 0],
(9, 9) : [3, 2, 3, 1, 5, 26, 5, 0, 0, 0, 0, 0, 0, 0],
(9, 10) : [3, 2, 4, 1, 3, 26, 4, 0, 0, 0, 0, 0, 0, 0],
(9, 11) : [3, 2, 4, 1, 1, 6, 6, 0, 0, 0, 0, 0, 0, 0],
(9, 12) : [3, 2, 27, 1, 5, 1, 5, 0, 0, 0, 0, 0, 0, 0],
(9, 13) : [3, 2, 4, 1, 28, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(9, 14) : [3, 2, 4, 1, 28, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(9, 15) : [3, 2, 4, 1, 5, 26, 13, 0, 0, 0, 0, 0, 0, 0],
(9, 16) : [3, 2, 4, 1, 5, 26, 13, 0, 0, 0, 0, 0, 0, 0],
(9, 17) : [3, 2, 27, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(9, 18) : [3, 2, 27, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(10, 1) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(10, 2) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(10, 3) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(10, 4) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(10, 5) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(10, 6) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(11, 1) : [0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(11, 2) : [0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(11, 3) : [3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(11, 4) : [3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(11, 5) : [0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(11, 6) : [0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(12, 1) : [0, 2, 2, 1, 1, 6, 6, 0, 0, 0, 0, 0, 0, 0],
(12, 2) : [3, 2, 4, 0, 3, 2, 4, 0, 0, 0, 0, 0, 0, 0],
(12, 3) : [3, 2, 4, 0, 3, 2, 4, 0, 0, 0, 0, 0, 0, 0],
(12, 4) : [3, 0, 3, 1, 5, 1, 5, 0, 0, 0, 0, 0, 0, 0],
(12, 5) : [3, 0, 3, 1, 5, 1, 5, 0, 0, 0, 0, 0, 0, 0],
(12, 6) : [0, 2, 2, 1, 1, 6, 6, 0, 0, 0, 0, 0, 0, 0],
(12, 7) : [3, 2, 4, 0, 3, 2, 4, 0, 0, 0, 0, 0, 0, 0],
(12, 8) : [0, 2, 2, 1, 1, 6, 6, 0, 0, 0, 0, 0, 0, 0],
(12, 9) : [3, 0, 3, 1, 5, 1, 5, 0, 0, 0, 0, 0, 0, 0],
(12, 10) : [3, 2, 4, 0, 3, 2, 4, 0, 0, 0, 0, 0, 0, 0],
(12, 11) : [0, 2, 2, 1, 1, 6, 6, 0, 0, 0, 0, 0, 0, 0],
(12, 12) : [3, 0, 3, 1, 5, 1, 5, 0, 0, 0, 0, 0, 0, 0],
(12, 13) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(12, 14) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(12, 15) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(12, 16) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(12, 17) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(12, 18) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(13, 1) : [3, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(13, 2) : [0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(13, 3) : [0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
(13, 4) : [0, 2, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0],
(13, 5) : [0, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(13, 6) : [3, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(13, 7) : [3, 0, 0, 1, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(13, 8) : [3, 0, 0, 1, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(13, 9) : [0, 2, 0, 1, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(13, 10) : [0, 2, 0, 1, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(13, 11) : [3, 2, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(13, 12) : [3, 2, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(13, 13) : [0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(13, 14) : [3, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(13, 15) : [0, 2, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0],
(13, 16) : [0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
(13, 17) : [3, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(13, 18) : [0, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(14, 1) : [3, 2, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(14, 2) : [0, 2, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(14, 3) : [3, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
(14, 4) : [3, 2, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0],
(14, 5) : [0, 2, 2, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(14, 6) : [3, 0, 3, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(14, 7) : [3, 2, 0, 1, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(14, 8) : [3, 2, 0, 1, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(14, 9) : [3, 2, 0, 1, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(14, 10) : [3, 2, 0, 1, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(14, 11) : [3, 2, 4, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(14, 12) : [3, 2, 4, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(14, 13) : [0, 2, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(14, 14) : [3, 2, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(14, 15) : [3, 2, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0],
(14, 16) : [3, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
(14, 17) : [3, 0, 3, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(14, 18) : [0, 2, 2, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(15, 1) : [3, 2, 2, 1, 28, 6, 6, 0, 0, 0, 0, 0, 0, 0],
(15, 2) : [3, 2, 4, 1, 28, 2, 4, 0, 0, 0, 0, 0, 0, 0],
(15, 3) : [3, 2, 4, 1, 3, 26, 4, 0, 0, 0, 0, 0, 0, 0],
(15, 4) : [3, 2, 3, 1, 5, 26, 5, 0, 0, 0, 0, 0, 0, 0],
(15, 5) : [3, 2, 27, 1, 5, 1, 5, 0, 0, 0, 0, 0, 0, 0],
(15, 6) : [3, 2, 27, 1, 1, 6, 6, 0, 0, 0, 0, 0, 0, 0],
(15, 7) : [3, 2, 4, 1, 28, 2, 4, 0, 0, 0, 0, 0, 0, 0],
(15, 8) : [3, 2, 2, 1, 28, 6, 6, 0, 0, 0, 0, 0, 0, 0],
(15, 9) : [3, 2, 3, 1, 5, 26, 5, 0, 0, 0, 0, 0, 0, 0],
(15, 10) : [3, 2, 4, 1, 3, 26, 4, 0, 0, 0, 0, 0, 0, 0],
(15, 11) : [3, 2, 27, 1, 1, 6, 6, 0, 0, 0, 0, 0, 0, 0],
(15, 12) : [3, 2, 27, 1, 5, 1, 5, 0, 0, 0, 0, 0, 0, 0],
(15, 13) : [3, 2, 4, 1, 28, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(15, 14) : [3, 2, 4, 1, 28, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(15, 15) : [3, 2, 4, 1, 5, 26, 13, 0, 0, 0, 0, 0, 0, 0],
(15, 16) : [3, 2, 4, 1, 5, 26, 13, 0, 0, 0, 0, 0, 0, 0],
(15, 17) : [3, 2, 27, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(15, 18) : [3, 2, 27, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(16, 1) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(16, 2) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(16, 3) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(16, 4) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(16, 5) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(16, 6) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(17, 1) : [3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(17, 2) : [3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(17, 3) : [0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(17, 4) : [0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(17, 5) : [0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(17, 6) : [0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(18, 1) : [0, 2, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(18, 2) : [0, 2, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(18, 3) : [3, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(18, 4) : [3, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(18, 5) : [3, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(18, 6) : [3, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(19, 1) : [3, 2, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(19, 2) : [3, 2, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(19, 3) : [3, 2, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(19, 4) : [3, 2, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(19, 5) : [3, 2, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(19, 6) : [3, 2, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(20, 1) : [3, 2, 2, 1, 1, 6, 6, 0, 0, 0, 0, 0, 0, 0],
(20, 2) : [3, 2, 2, 1, 1, 6, 6, 0, 0, 0, 0, 0, 0, 0],
(20, 3) : [3, 2, 4, 1, 3, 2, 4, 0, 0, 0, 0, 0, 0, 0],
(20, 4) : [3, 2, 4, 1, 3, 2, 4, 0, 0, 0, 0, 0, 0, 0],
(20, 5) : [3, 2, 3, 1, 5, 1, 5, 0, 0, 0, 0, 0, 0, 0],
(20, 6) : [3, 2, 3, 1, 5, 1, 5, 0, 0, 0, 0, 0, 0, 0],
(21, 1) : [0, 2, 2, 1, 1, 6, 6, 0, 0, 0, 0, 0, 0, 0],
(21, 2) : [0, 2, 2, 1, 1, 6, 6, 0, 0, 0, 0, 0, 0, 0],
(21, 3) : [3, 2, 4, 0, 3, 2, 4, 0, 0, 0, 0, 0, 0, 0],
(21, 4) : [3, 2, 4, 0, 3, 2, 4, 0, 0, 0, 0, 0, 0, 0],
(21, 5) : [3, 0, 3, 1, 5, 1, 5, 0, 0, 0, 0, 0, 0, 0],
(21, 6) : [3, 0, 3, 1, 5, 1, 5, 0, 0, 0, 0, 0, 0, 0],
(22, 1) : [3, 2, 27, 1, 28, 26, 7, 0, 0, 0, 0, 0, 0, 0],
(22, 2) : [3, 2, 27, 1, 28, 26, 7, 0, 0, 0, 0, 0, 0, 0],
(22, 3) : [3, 2, 27, 1, 28, 26, 7, 0, 0, 0, 0, 0, 0, 0],
(22, 4) : [3, 2, 27, 1, 28, 26, 7, 0, 0, 0, 0, 0, 0, 0],
(22, 5) : [3, 2, 27, 1, 28, 26, 7, 0, 0, 0, 0, 0, 0, 0],
(22, 6) : [3, 2, 27, 1, 28, 26, 7, 0, 0, 0, 0, 0, 0, 0],
(23, 1) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(23, 2) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(23, 3) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(23, 4) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(23, 5) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(23, 6) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(24, 1) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(24, 2) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(24, 3) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(24, 4) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(24, 5) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(24, 6) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(25, 1) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(25, 2) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(25, 3) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(25, 4) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(25, 5) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(25, 6) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(26, 1) : [3, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(26, 2) : [3, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(26, 3) : [0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
(26, 4) : [0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(26, 5) : [0, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(26, 6) : [0, 2, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0],
(27, 1) : [3, 0, 3, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(27, 2) : [3, 0, 3, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(27, 3) : [0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0],
(27, 4) : [0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0],
(27, 5) : [0, 2, 2, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0],
(27, 6) : [0, 2, 2, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0],
(28, 1) : [0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(28, 2) : [0, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(28, 3) : [0, 2, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0],
(28, 4) : [3, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(28, 5) : [3, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(28, 6) : [0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
(29, 1) : [3, 0, 3, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(29, 2) : [3, 2, 2, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(29, 3) : [0, 2, 0, 1, 1, 2, 0, 0, 0, 0, 0, 0, 0, 0],
(29, 4) : [3, 0, 0, 1, 3, 1, 0, 0, 0, 0, 0, 0, 0, 0],
(29, 5) : [3, 2, 3, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0],
(29, 6) : [0, 2, 2, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
(30, 1) : [3, 2, 4, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(30, 2) : [3, 0, 3, 1, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(30, 3) : [3, 0, 0, 1, 5, 1, 0, 0, 0, 0, 0, 0, 0, 0],
(30, 4) : [0, 2, 0, 1, 1, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(30, 5) : [0, 2, 2, 1, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(30, 6) : [3, 2, 4, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0],
(31, 1) : [3, 0, 0, 1, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(31, 2) : [3, 2, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(31, 3) : [0, 2, 0, 1, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(31, 4) : [3, 0, 0, 1, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(31, 5) : [3, 2, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(31, 6) : [0, 2, 0, 1, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(32, 1) : [0, 2, 2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(32, 2) : [0, 2, 2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(32, 3) : [3, 2, 0, 0, 3, 2, 0, 0, 0, 0, 0, 0, 0, 0],
(32, 4) : [3, 2, 0, 0, 3, 2, 0, 0, 0, 0, 0, 0, 0, 0],
(32, 5) : [3, 0, 3, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
(32, 6) : [3, 0, 3, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
(33, 1) : [3, 2, 4, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(33, 2) : [3, 2, 2, 1, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(33, 3) : [3, 2, 0, 1, 5, 2, 0, 0, 0, 0, 0, 0, 0, 0],
(33, 4) : [3, 2, 0, 1, 3, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(33, 5) : [3, 2, 3, 1, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(33, 6) : [3, 2, 4, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
(34, 1) : [3, 2, 4, 1, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(34, 2) : [3, 2, 4, 1, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(34, 3) : [3, 2, 0, 1, 5, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(34, 4) : [3, 2, 0, 1, 5, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(34, 5) : [3, 2, 4, 1, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(34, 6) : [3, 2, 4, 1, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(35, 1) : [0, 2, 2, 1, 1, 6, 6, 0, 0, 0, 0, 0, 0, 0],
(35, 2) : [0, 2, 2, 1, 1, 6, 6, 0, 0, 0, 0, 0, 0, 0],
(35, 3) : [3, 2, 4, 0, 3, 2, 4, 0, 0, 0, 0, 0, 0, 0],
(35, 4) : [3, 2, 4, 0, 3, 2, 4, 0, 0, 0, 0, 0, 0, 0],
(35, 5) : [3, 0, 3, 1, 5, 1, 5, 0, 0, 0, 0, 0, 0, 0],
(35, 6) : [3, 0, 3, 1, 5, 1, 5, 0, 0, 0, 0, 0, 0, 0],
(36, 1) : [3, 2, 2, 1, 28, 6, 6, 0, 0, 0, 0, 0, 0, 0],
(36, 2) : [3, 2, 27, 1, 1, 6, 6, 0, 0, 0, 0, 0, 0, 0],
(36, 3) : [3, 2, 4, 1, 3, 26, 4, 0, 0, 0, 0, 0, 0, 0],
(36, 4) : [3, 2, 4, 1, 28, 2, 4, 0, 0, 0, 0, 0, 0, 0],
(36, 5) : [3, 2, 27, 1, 5, 1, 5, 0, 0, 0, 0, 0, 0, 0],
(36, 6) : [3, 2, 3, 1, 5, 26, 5, 0, 0, 0, 0, 0, 0, 0],
(37, 1) : [3, 2, 27, 1, 28, 6, 6, 0, 0, 0, 0, 0, 0, 0],
(37, 2) : [3, 2, 27, 1, 28, 6, 6, 0, 0, 0, 0, 0, 0, 0],
(37, 3) : [3, 2, 4, 1, 28, 26, 4, 0, 0, 0, 0, 0, 0, 0],
(37, 4) : [3, 2, 4, 1, 28, 26, 4, 0, 0, 0, 0, 0, 0, 0],
(37, 5) : [3, 2, 27, 1, 5, 26, 5, 0, 0, 0, 0, 0, 0, 0],
(37, 6) : [3, 2, 27, 1, 5, 26, 5, 0, 0, 0, 0, 0, 0, 0],
(38, 1) : [3, 2, 4, 0, 3, 2, 4, 0, 0, 0, 0, 0, 0, 0],
(38, 2) : [3, 0, 3, 1, 5, 1, 5, 0, 0, 0, 0, 0, 0, 0],
(38, 3) : [3, 0, 3, 1, 5, 1, 5, 0, 0, 0, 0, 0, 0, 0],
(38, 4) : [0, 2, 2, 1, 1, 6, 6, 0, 0, 0, 0, 0, 0, 0],
(38, 5) : [0, 2, 2, 1, 1, 6, 6, 0, 0, 0, 0, 0, 0, 0],
(38, 6) : [3, 2, 4, 0, 3, 2, 4, 0, 0, 0, 0, 0, 0, 0],
(39, 1) : [3, 2, 27, 0, 3, 2, 4, 0, 0, 0, 0, 0, 0, 0],
(39, 2) : [3, 0, 3, 1, 28, 1, 5, 0, 0, 0, 0, 0, 0, 0],
(39, 3) : [3, 0, 3, 1, 28, 1, 5, 0, 0, 0, 0, 0, 0, 0],
(39, 4) : [0, 2, 2, 1, 1, 26, 6, 0, 0, 0, 0, 0, 0, 0],
(39, 5) : [0, 2, 2, 1, 1, 26, 6, 0, 0, 0, 0, 0, 0, 0],
(39, 6) : [3, 2, 27, 0, 3, 2, 4, 0, 0, 0, 0, 0, 0, 0],
(40, 1) : [3, 2, 4, 1, 28, 2, 4, 0, 0, 0, 0, 0, 0, 0],
(40, 2) : [3, 2, 27, 1, 5, 1, 5, 0, 0, 0, 0, 0, 0, 0],
(40, 3) : [3, 2, 3, 1, 5, 26, 5, 0, 0, 0, 0, 0, 0, 0],
(40, 4) : [3, 2, 2, 1, 28, 6, 6, 0, 0, 0, 0, 0, 0, 0],
(40, 5) : [3, 2, 27, 1, 1, 6, 6, 0, 0, 0, 0, 0, 0, 0],
(40, 6) : [3, 2, 4, 1, 3, 26, 4, 0, 0, 0, 0, 0, 0, 0],
(41, 1) : [3, 2, 27, 1, 28, 2, 4, 0, 0, 0, 0, 0, 0, 0],
(41, 2) : [3, 2, 27, 1, 28, 1, 5, 0, 0, 0, 0, 0, 0, 0],
(41, 3) : [3, 2, 3, 1, 28, 26, 5, 0, 0, 0, 0, 0, 0, 0],
(41, 4) : [3, 2, 2, 1, 28, 26, 6, 0, 0, 0, 0, 0, 0, 0],
(41, 5) : [3, 2, 27, 1, 1, 26, 6, 0, 0, 0, 0, 0, 0, 0],
(41, 6) : [3, 2, 27, 1, 3, 26, 4, 0, 0, 0, 0, 0, 0, 0],
(42, 1) : [3, 2, 27, 1, 28, 26, 7, 0, 0, 0, 0, 0, 0, 0],
(42, 2) : [3, 2, 27, 1, 28, 26, 7, 0, 0, 0, 0, 0, 0, 0],
(42, 3) : [3, 2, 27, 1, 28, 26, 7, 0, 0, 0, 0, 0, 0, 0],
(42, 4) : [3, 2, 27, 1, 28, 26, 7, 0, 0, 0, 0, 0, 0, 0],
(42, 5) : [3, 2, 27, 1, 28, 26, 7, 0, 0, 0, 0, 0, 0, 0],
(42, 6) : [3, 2, 27, 1, 28, 26, 7, 0, 0, 0, 0, 0, 0, 0],
(43, 1) : [19, 17, 29, 16, 30, 0, 7, 29, 0, 0, 30, 0, 0, 0],
(43, 2) : [19, 17, 29, 16, 30, 0, 7, 29, 0, 0, 30, 0, 0, 0],
(43, 3) : [19, 17, 0, 16, 30, 31, 7, 0, 31, 0, 30, 0, 0, 0],
(43, 4) : [19, 17, 0, 16, 30, 31, 7, 0, 31, 0, 30, 0, 0, 0],
(43, 5) : [19, 17, 29, 16, 0, 31, 7, 29, 31, 0, 0, 0, 0, 0],
(43, 6) : [19, 17, 29, 16, 0, 31, 7, 29, 31, 0, 0, 0, 0, 0],
(44, 1) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(44, 2) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(44, 3) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(44, 4) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(44, 5) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(44, 6) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(45, 1) : [3, 2, 27, 1, 28, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(45, 2) : [3, 2, 27, 1, 28, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(45, 3) : [3, 2, 4, 1, 28, 26, 13, 0, 0, 0, 0, 0, 0, 0],
(45, 4) : [3, 2, 4, 1, 28, 26, 13, 0, 0, 0, 0, 0, 0, 0],
(45, 5) : [3, 2, 27, 1, 5, 26, 13, 0, 0, 0, 0, 0, 0, 0],
(45, 6) : [3, 2, 27, 1, 5, 26, 13, 0, 0, 0, 0, 0, 0, 0],
(46, 1) : [3, 2, 4, 1, 28, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(46, 2) : [3, 2, 27, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(46, 3) : [3, 2, 4, 1, 5, 26, 13, 0, 0, 0, 0, 0, 0, 0],
(46, 4) : [3, 2, 4, 1, 28, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(46, 5) : [3, 2, 27, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(46, 6) : [3, 2, 4, 1, 5, 26, 13, 0, 0, 0, 0, 0, 0, 0],
(47, 1) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(47, 2) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(47, 3) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(47, 4) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(47, 5) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(47, 6) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(48, 1) : [3, 2, 4, 1, 5, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(48, 2) : [3, 2, 4, 1, 5, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(48, 3) : [3, 2, 4, 1, 5, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(48, 4) : [3, 2, 4, 1, 5, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(48, 5) : [3, 2, 4, 1, 5, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(48, 6) : [3, 2, 4, 1, 5, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(48, 7) : [3, 2, 4, 1, 5, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(48, 8) : [3, 2, 4, 1, 5, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(48, 9) : [3, 2, 4, 1, 5, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(48, 10) : [3, 2, 4, 1, 5, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(48, 11) : [3, 2, 4, 1, 5, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(48, 12) : [3, 2, 4, 1, 5, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(49, 1) : [3, 0, 3, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(49, 2) : [3, 0, 3, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(49, 3) : [0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0],
(49, 4) : [0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0],
(49, 5) : [0, 2, 2, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0],
(49, 6) : [0, 2, 2, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0],
(50, 1) : [0, 2, 2, 1, 1, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(50, 2) : [0, 2, 2, 1, 1, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(50, 3) : [0, 2, 2, 1, 1, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(50, 4) : [0, 2, 2, 1, 1, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(50, 5) : [3, 2, 4, 0, 3, 2, 0, 0, 0, 0, 0, 0, 0, 0],
(50, 6) : [3, 2, 4, 0, 3, 2, 0, 0, 0, 0, 0, 0, 0, 0],
(50, 7) : [3, 2, 4, 0, 3, 2, 0, 0, 0, 0, 0, 0, 0, 0],
(50, 8) : [3, 2, 4, 0, 3, 2, 0, 0, 0, 0, 0, 0, 0, 0],
(50, 9) : [3, 0, 3, 1, 5, 1, 0, 0, 0, 0, 0, 0, 0, 0],
(50, 10) : [3, 0, 3, 1, 5, 1, 0, 0, 0, 0, 0, 0, 0, 0],
(50, 11) : [3, 0, 3, 1, 5, 1, 0, 0, 0, 0, 0, 0, 0, 0],
(50, 12) : [3, 0, 3, 1, 5, 1, 0, 0, 0, 0, 0, 0, 0, 0],
(51, 1) : [0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
(51, 2) : [0, 2, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0],
(51, 3) : [0, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(51, 4) : [3, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(51, 5) : [3, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(51, 6) : [0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(52, 1) : [3, 2, 4, 1, 5, 1, 0, 0, 0, 0, 0, 0, 0, 0],
(52, 2) : [3, 2, 4, 1, 5, 2, 0, 0, 0, 0, 0, 0, 0, 0],
(52, 3) : [3, 2, 2, 1, 5, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(52, 4) : [3, 2, 3, 1, 5, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(52, 5) : [3, 2, 4, 1, 3, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(52, 6) : [3, 2, 4, 1, 1, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(53, 1) : [3, 0, 0, 1, 5, 1, 0, 0, 0, 0, 0, 0, 0, 0],
(53, 2) : [3, 2, 4, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0],
(53, 3) : [0, 2, 2, 1, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(53, 4) : [3, 0, 3, 1, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(53, 5) : [3, 2, 4, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(53, 6) : [0, 2, 0, 1, 1, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(54, 1) : [3, 0, 3, 1, 3, 1, 0, 0, 0, 0, 0, 0, 0, 0],
(54, 2) : [3, 2, 3, 0, 3, 2, 0, 0, 0, 0, 0, 0, 0, 0],
(54, 3) : [0, 2, 2, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0],
(54, 4) : [3, 0, 3, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0],
(54, 5) : [3, 2, 2, 0, 3, 2, 0, 0, 0, 0, 0, 0, 0, 0],
(54, 6) : [0, 2, 2, 1, 1, 2, 0, 0, 0, 0, 0, 0, 0, 0],
(55, 1) : [0, 2, 2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(55, 2) : [0, 2, 2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(55, 3) : [3, 2, 0, 0, 3, 2, 0, 0, 0, 0, 0, 0, 0, 0],
(55, 4) : [3, 2, 0, 0, 3, 2, 0, 0, 0, 0, 0, 0, 0, 0],
(55, 5) : [3, 0, 3, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
(55, 6) : [3, 0, 3, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
(56, 1) : [3, 2, 3, 1, 3, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(56, 2) : [3, 2, 3, 1, 3, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(56, 3) : [3, 2, 4, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0],
(56, 4) : [3, 2, 4, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0],
(56, 5) : [3, 2, 2, 1, 5, 2, 0, 0, 0, 0, 0, 0, 0, 0],
(56, 6) : [3, 2, 2, 1, 5, 2, 0, 0, 0, 0, 0, 0, 0, 0],
(57, 1) : [3, 2, 2, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(57, 2) : [3, 0, 3, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(57, 3) : [3, 0, 0, 1, 3, 1, 0, 0, 0, 0, 0, 0, 0, 0],
(57, 4) : [0, 2, 0, 1, 1, 2, 0, 0, 0, 0, 0, 0, 0, 0],
(57, 5) : [0, 2, 2, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
(57, 6) : [3, 2, 3, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0],
(58, 1) : [3, 2, 4, 1, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(58, 2) : [3, 2, 4, 1, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(58, 3) : [3, 2, 0, 1, 5, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(58, 4) : [3, 2, 0, 1, 5, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(58, 5) : [3, 2, 4, 1, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(58, 6) : [3, 2, 4, 1, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(59, 1) : [0, 2, 0, 1, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(59, 2) : [0, 2, 0, 1, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(59, 3) : [0, 2, 0, 1, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(59, 4) : [0, 2, 0, 1, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(59, 5) : [3, 2, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(59, 6) : [3, 2, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(59, 7) : [3, 2, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(59, 8) : [3, 2, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(59, 9) : [3, 0, 0, 1, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(59, 10) : [3, 0, 0, 1, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(59, 11) : [3, 0, 0, 1, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(59, 12) : [3, 0, 0, 1, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(60, 1) : [3, 2, 2, 1, 3, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(60, 2) : [3, 2, 3, 1, 1, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(60, 3) : [3, 2, 4, 1, 3, 1, 0, 0, 0, 0, 0, 0, 0, 0],
(60, 4) : [3, 2, 4, 1, 1, 2, 0, 0, 0, 0, 0, 0, 0, 0],
(60, 5) : [3, 2, 2, 1, 5, 1, 0, 0, 0, 0, 0, 0, 0, 0],
(60, 6) : [3, 2, 3, 1, 5, 2, 0, 0, 0, 0, 0, 0, 0, 0],
(61, 1) : [3, 2, 2, 1, 3, 1, 0, 0, 0, 0, 0, 0, 0, 0],
(61, 2) : [3, 2, 3, 1, 1, 2, 0, 0, 0, 0, 0, 0, 0, 0],
(61, 3) : [3, 2, 2, 1, 3, 1, 0, 0, 0, 0, 0, 0, 0, 0],
(61, 4) : [3, 2, 3, 1, 1, 2, 0, 0, 0, 0, 0, 0, 0, 0],
(61, 5) : [3, 2, 2, 1, 3, 1, 0, 0, 0, 0, 0, 0, 0, 0],
(61, 6) : [3, 2, 3, 1, 1, 2, 0, 0, 0, 0, 0, 0, 0, 0],
(62, 1) : [3, 2, 4, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
(62, 2) : [3, 2, 0, 1, 5, 2, 0, 0, 0, 0, 0, 0, 0, 0],
(62, 3) : [3, 2, 2, 1, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(62, 4) : [3, 2, 3, 1, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(62, 5) : [3, 2, 0, 1, 3, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(62, 6) : [3, 2, 4, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(63, 1) : [3, 2, 2, 1, 28, 6, 6, 0, 0, 0, 0, 0, 0, 0],
(63, 2) : [3, 2, 27, 1, 1, 6, 6, 0, 0, 0, 0, 0, 0, 0],
(63, 3) : [3, 2, 4, 1, 3, 26, 4, 0, 0, 0, 0, 0, 0, 0],
(63, 4) : [3, 2, 4, 1, 28, 2, 4, 0, 0, 0, 0, 0, 0, 0],
(63, 5) : [3, 2, 27, 1, 5, 1, 5, 0, 0, 0, 0, 0, 0, 0],
(63, 6) : [3, 2, 3, 1, 5, 26, 5, 0, 0, 0, 0, 0, 0, 0],
(64, 1) : [3, 2, 2, 1, 28, 26, 6, 0, 0, 0, 0, 0, 0, 0],
(64, 2) : [3, 2, 27, 1, 1, 26, 6, 0, 0, 0, 0, 0, 0, 0],
(64, 3) : [3, 2, 27, 1, 3, 26, 4, 0, 0, 0, 0, 0, 0, 0],
(64, 4) : [3, 2, 27, 1, 28, 2, 4, 0, 0, 0, 0, 0, 0, 0],
(64, 5) : [3, 2, 27, 1, 28, 1, 5, 0, 0, 0, 0, 0, 0, 0],
(64, 6) : [3, 2, 3, 1, 28, 26, 5, 0, 0, 0, 0, 0, 0, 0],
(65, 1) : [0, 2, 2, 1, 1, 6, 6, 0, 0, 0, 0, 0, 0, 0],
(65, 2) : [0, 2, 2, 1, 1, 6, 6, 0, 0, 0, 0, 0, 0, 0],
(65, 3) : [3, 2, 4, 0, 3, 2, 4, 0, 0, 0, 0, 0, 0, 0],
(65, 4) : [3, 2, 4, 0, 3, 2, 4, 0, 0, 0, 0, 0, 0, 0],
(65, 5) : [3, 0, 3, 1, 5, 1, 5, 0, 0, 0, 0, 0, 0, 0],
(65, 6) : [3, 0, 3, 1, 5, 1, 5, 0, 0, 0, 0, 0, 0, 0],
(66, 1) : [3, 2, 27, 1, 28, 6, 6, 0, 0, 0, 0, 0, 0, 0],
(66, 2) : [3, 2, 27, 1, 28, 6, 6, 0, 0, 0, 0, 0, 0, 0],
(66, 3) : [3, 2, 4, 1, 28, 26, 4, 0, 0, 0, 0, 0, 0, 0],
(66, 4) : [3, 2, 4, 1, 28, 26, 4, 0, 0, 0, 0, 0, 0, 0],
(66, 5) : [3, 2, 27, 1, 5, 26, 5, 0, 0, 0, 0, 0, 0, 0],
(66, 6) : [3, 2, 27, 1, 5, 26, 5, 0, 0, 0, 0, 0, 0, 0],
(67, 1) : [0, 2, 2, 1, 1, 26, 6, 0, 0, 0, 0, 0, 0, 0],
(67, 2) : [0, 2, 2, 1, 1, 26, 6, 0, 0, 0, 0, 0, 0, 0],
(67, 3) : [3, 2, 27, 0, 3, 2, 4, 0, 0, 0, 0, 0, 0, 0],
(67, 4) : [3, 2, 27, 0, 3, 2, 4, 0, 0, 0, 0, 0, 0, 0],
(67, 5) : [3, 0, 3, 1, 28, 1, 5, 0, 0, 0, 0, 0, 0, 0],
(67, 6) : [3, 0, 3, 1, 28, 1, 5, 0, 0, 0, 0, 0, 0, 0],
(68, 1) : [3, 2, 27, 1, 28, 26, 6, 0, 0, 0, 0, 0, 0, 0],
(68, 2) : [3, 2, 27, 1, 28, 26, 6, 0, 0, 0, 0, 0, 0, 0],
(68, 3) : [3, 2, 27, 1, 28, 26, 6, 0, 0, 0, 0, 0, 0, 0],
(68, 4) : [3, 2, 27, 1, 28, 26, 6, 0, 0, 0, 0, 0, 0, 0],
(68, 5) : [3, 2, 27, 1, 28, 26, 4, 0, 0, 0, 0, 0, 0, 0],
(68, 6) : [3, 2, 27, 1, 28, 26, 4, 0, 0, 0, 0, 0, 0, 0],
(68, 7) : [3, 2, 27, 1, 28, 26, 4, 0, 0, 0, 0, 0, 0, 0],
(68, 8) : [3, 2, 27, 1, 28, 26, 4, 0, 0, 0, 0, 0, 0, 0],
(68, 9) : [3, 2, 27, 1, 28, 26, 5, 0, 0, 0, 0, 0, 0, 0],
(68, 10) : [3, 2, 27, 1, 28, 26, 5, 0, 0, 0, 0, 0, 0, 0],
(68, 11) : [3, 2, 27, 1, 28, 26, 5, 0, 0, 0, 0, 0, 0, 0],
(68, 12) : [3, 2, 27, 1, 28, 26, 5, 0, 0, 0, 0, 0, 0, 0],
(69, 1) : [3, 2, 27, 1, 28, 26, 7, 0, 0, 0, 0, 0, 0, 0],
(69, 2) : [3, 2, 27, 1, 28, 26, 7, 0, 0, 0, 0, 0, 0, 0],
(69, 3) : [3, 2, 27, 1, 28, 26, 7, 0, 0, 0, 0, 0, 0, 0],
(69, 4) : [3, 2, 27, 1, 28, 26, 7, 0, 0, 0, 0, 0, 0, 0],
(69, 5) : [3, 2, 27, 1, 28, 26, 7, 0, 0, 0, 0, 0, 0, 0],
(69, 6) : [3, 2, 27, 1, 28, 26, 7, 0, 0, 0, 0, 0, 0, 0],
(70, 1) : [19, 17, 29, 16, 30, 31, 7, 29, 31, 0, 30, 0, 0, 0],
(70, 2) : [19, 17, 29, 16, 30, 31, 7, 29, 31, 0, 30, 0, 0, 0],
(70, 3) : [19, 17, 29, 16, 30, 31, 7, 29, 31, 0, 30, 0, 0, 0],
(70, 4) : [19, 17, 29, 16, 30, 31, 7, 29, 31, 0, 30, 0, 0, 0],
(70, 5) : [19, 17, 29, 16, 30, 31, 7, 29, 31, 0, 30, 0, 0, 0],
(70, 6) : [19, 17, 29, 16, 30, 31, 7, 29, 31, 0, 30, 0, 0, 0],
(70, 7) : [19, 17, 29, 16, 30, 31, 7, 29, 31, 0, 30, 0, 0, 0],
(70, 8) : [19, 17, 29, 16, 30, 31, 7, 29, 31, 0, 30, 0, 0, 0],
(70, 9) : [19, 17, 29, 16, 30, 31, 7, 29, 31, 0, 30, 0, 0, 0],
(70, 10) : [19, 17, 29, 16, 30, 31, 7, 29, 31, 0, 30, 0, 0, 0],
(70, 11) : [19, 17, 29, 16, 30, 31, 7, 29, 31, 0, 30, 0, 0, 0],
(70, 12) : [19, 17, 29, 16, 30, 31, 7, 29, 31, 0, 30, 0, 0, 0],
(71, 1) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(71, 2) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(71, 3) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(71, 4) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(71, 5) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(71, 6) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(72, 1) : [3, 2, 27, 1, 28, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(72, 2) : [3, 2, 27, 1, 28, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(72, 3) : [3, 2, 4, 1, 28, 26, 13, 0, 0, 0, 0, 0, 0, 0],
(72, 4) : [3, 2, 4, 1, 28, 26, 13, 0, 0, 0, 0, 0, 0, 0],
(72, 5) : [3, 2, 27, 1, 5, 26, 13, 0, 0, 0, 0, 0, 0, 0],
(72, 6) : [3, 2, 27, 1, 5, 26, 13, 0, 0, 0, 0, 0, 0, 0],
(73, 1) : [3, 2, 27, 1, 28, 26, 13, 0, 0, 0, 0, 0, 0, 0],
(73, 2) : [3, 2, 27, 1, 28, 26, 13, 0, 0, 0, 0, 0, 0, 0],
(73, 3) : [3, 2, 27, 1, 28, 26, 13, 0, 0, 0, 0, 0, 0, 0],
(73, 4) : [3, 2, 27, 1, 28, 26, 13, 0, 0, 0, 0, 0, 0, 0],
(73, 5) : [3, 2, 27, 1, 28, 26, 13, 0, 0, 0, 0, 0, 0, 0],
(73, 6) : [3, 2, 27, 1, 28, 26, 13, 0, 0, 0, 0, 0, 0, 0],
(74, 1) : [3, 2, 4, 1, 5, 26, 13, 0, 0, 0, 0, 0, 0, 0],
(74, 2) : [3, 2, 4, 1, 5, 26, 13, 0, 0, 0, 0, 0, 0, 0],
(74, 3) : [3, 2, 27, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(74, 4) : [3, 2, 27, 1, 5, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(74, 5) : [3, 2, 4, 1, 28, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(74, 6) : [3, 2, 4, 1, 28, 6, 13, 0, 0, 0, 0, 0, 0, 0],
(75, 1) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(76, 1) : [19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(77, 1) : [3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(78, 1) : [19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(79, 1) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 3, 0, 0, 0, 0],
(80, 1) : [19, 2, 4, 1, 5, 6, 13, 0, 0, 3, 0, 0, 0, 0],
(81, 1) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(82, 1) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 3, 0, 0, 0, 0],
(83, 1) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(84, 1) : [3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(85, 1) : [0, 2, 0, 1, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(85, 2) : [0, 2, 0, 1, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(86, 1) : [3, 2, 0, 1, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(86, 2) : [3, 2, 0, 1, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(87, 1) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 3, 0, 0, 0, 0],
(88, 1) : [19, 2, 4, 1, 5, 26, 13, 0, 0, 3, 0, 0, 0, 0],
(88, 2) : [19, 2, 4, 1, 5, 26, 13, 0, 0, 3, 0, 0, 0, 0],
(89, 1) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(90, 1) : [0, 2, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(91, 1) : [19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(92, 1) : [19, 2, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(93, 1) : [3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(94, 1) : [3, 2, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(95, 1) : [19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(96, 1) : [19, 2, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(97, 1) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 3, 0, 0, 0, 0],
(98, 1) : [19, 2, 4, 1, 5, 6, 13, 0, 0, 3, 0, 0, 0, 0],
(99, 1) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(100, 1) : [0, 2, 2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(101, 1) : [3, 0, 3, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(102, 1) : [3, 2, 4, 1, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(103, 1) : [3, 0, 3, 0, 3, 0, 0, 2, 0, 3, 1, 0, 0, 1],
(104, 1) : [3, 2, 4, 1, 5, 0, 0, 0, 0, 3, 0, 0, 0, 1],
(105, 1) : [3, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 1],
(106, 1) : [3, 2, 2, 1, 1, 0, 0, 0, 0, 3, 0, 0, 0, 1],
(107, 1) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 3, 0, 0, 0, 0],
(108, 1) : [3, 2, 27, 1, 28, 6, 13, 0, 0, 3, 0, 0, 0, 0],
(109, 1) : [19, 2, 4, 1, 5, 6, 13, 0, 1, 12, 0, 0, 0, 12],
(110, 1) : [19, 2, 27, 1, 28, 6, 13, 2, 1, 12, 1, 0, 0, 12],
(111, 1) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(112, 1) : [3, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 1],
(113, 1) : [0, 2, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(114, 1) : [3, 2, 0, 1, 0, 0, 0, 0, 0, 3, 0, 0, 0, 1],
(115, 1) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(116, 1) : [3, 0, 3, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(117, 1) : [0, 2, 2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(118, 1) : [3, 2, 4, 1, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(119, 1) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 3, 0, 0, 0, 0],
(120, 1) : [3, 2, 27, 1, 28, 6, 13, 0, 0, 3, 0, 0, 0, 0],
(121, 1) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 3, 0, 0, 0, 0],
(122, 1) : [19, 2, 4, 1, 5, 6, 13, 0, 1, 12, 0, 0, 0, 12],
(123, 1) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(124, 1) : [3, 0, 3, 0, 3, 0, 0, 0, 0, 3, 0, 0, 0, 1],
(125, 1) : [0, 2, 2, 1, 1, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(125, 2) : [0, 2, 2, 1, 1, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(126, 1) : [3, 2, 4, 1, 5, 6, 0, 0, 0, 3, 0, 0, 0, 1],
(126, 2) : [3, 2, 4, 1, 5, 6, 0, 0, 0, 3, 0, 0, 0, 1],
(127, 1) : [0, 2, 2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(128, 1) : [3, 2, 4, 1, 5, 0, 0, 0, 0, 3, 0, 0, 0, 1],
(129, 1) : [0, 2, 0, 1, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(129, 2) : [0, 2, 0, 1, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(130, 1) : [3, 2, 3, 1, 3, 6, 0, 0, 0, 3, 0, 0, 0, 1],
(130, 2) : [3, 2, 3, 1, 3, 6, 0, 0, 0, 3, 0, 0, 0, 1],
(131, 1) : [3, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 1],
(132, 1) : [3, 0, 3, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(133, 1) : [3, 2, 2, 1, 1, 6, 0, 0, 0, 3, 0, 0, 0, 1],
(133, 2) : [3, 2, 2, 1, 1, 6, 0, 0, 0, 3, 0, 0, 0, 1],
(134, 1) : [3, 2, 4, 1, 5, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(134, 2) : [3, 2, 4, 1, 5, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(135, 1) : [3, 2, 2, 1, 1, 0, 0, 0, 0, 3, 0, 0, 0, 1],
(136, 1) : [3, 2, 4, 1, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(137, 1) : [3, 2, 0, 1, 0, 6, 0, 0, 0, 3, 0, 0, 0, 1],
(137, 2) : [3, 2, 0, 1, 0, 6, 0, 0, 0, 3, 0, 0, 0, 1],
(138, 1) : [3, 2, 3, 1, 3, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(138, 2) : [3, 2, 3, 1, 3, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(139, 1) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 3, 0, 0, 0, 0],
(140, 1) : [3, 2, 27, 1, 28, 6, 13, 0, 0, 3, 0, 0, 0, 0],
(141, 1) : [19, 2, 4, 1, 5, 26, 13, 0, 0, 12, 0, 0, 0, 12],
(141, 2) : [19, 2, 4, 1, 5, 26, 13, 0, 0, 12, 0, 0, 0, 12],
(142, 1) : [19, 2, 27, 1, 28, 26, 13, 0, 0, 12, 0, 0, 0, 12],
(142, 2) : [19, 2, 27, 1, 28, 26, 13, 0, 0, 12, 0, 0, 0, 12],
(143, 1) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(144, 1) : [18, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(145, 1) : [18, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(146, 1) : [18, 0, 0, 0, 0, 0, 14, 0, 0, 18, 0, 0, 0, 0],
(146, 2) : [0, 0, 0, 0, 0, 0, 21, 0, 0, 0, 0, 0, 0, 0],
(147, 1) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(148, 1) : [18, 0, 0, 0, 0, 0, 14, 0, 0, 18, 0, 0, 0, 0],
(148, 2) : [0, 0, 0, 0, 0, 0, 21, 0, 0, 0, 0, 0, 0, 0],
(149, 1) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(150, 1) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(151, 1) : [18, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(152, 1) : [18, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(153, 1) : [18, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(154, 1) : [18, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(155, 1) : [18, 0, 0, 0, 0, 0, 14, 0, 0, 18, 0, 0, 0, 0],
(155, 2) : [0, 0, 0, 0, 0, 0, 21, 0, 0, 0, 0, 0, 0, 0],
(156, 1) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(157, 1) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(158, 1) : [3, 0, 3, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(159, 1) : [3, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 3],
(160, 1) : [18, 0, 0, 0, 0, 0, 14, 0, 0, 18, 0, 0, 0, 0],
(160, 2) : [0, 0, 0, 0, 0, 0, 21, 0, 0, 0, 0, 0, 0, 0],
(161, 1) : [20, 0, 0, 0, 0, 0, 14, 0, 0, 18, 0, 0, 0, 0],
(161, 2) : [3, 2, 0, 1, 0, 0, 21, 0, 0, 3, 0, 1, 2, 1],
(162, 1) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(163, 1) : [3, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 3],
(164, 1) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(165, 1) : [3, 0, 3, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(166, 1) : [18, 0, 0, 0, 0, 0, 14, 0, 0, 18, 0, 0, 0, 0],
(166, 2) : [0, 0, 0, 0, 0, 0, 21, 0, 0, 0, 0, 0, 0, 0],
(167, 1) : [20, 0, 0, 0, 0, 0, 14, 0, 0, 18, 0, 0, 0, 0],
(167, 2) : [3, 2, 0, 1, 0, 0, 21, 0, 0, 3, 0, 1, 2, 1],
(168, 1) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(169, 1) : [20, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(170, 1) : [20, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(171, 1) : [18, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(172, 1) : [18, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(173, 1) : [3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(174, 1) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(175, 1) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(176, 1) : [3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(177, 1) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(178, 1) : [20, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(179, 1) : [20, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(180, 1) : [18, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(181, 1) : [18, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(182, 1) : [3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(183, 1) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(184, 1) : [3, 0, 3, 0, 3, 0, 0, 0, 0, 3, 0, 0, 0, 1],
(185, 1) : [3, 0, 3, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(186, 1) : [3, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 1],
(187, 1) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(188, 1) : [3, 0, 3, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(189, 1) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(190, 1) : [3, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0],
(191, 1) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(192, 1) : [3, 0, 3, 0, 3, 0, 0, 0, 0, 3, 0, 0, 0, 1],
(193, 1) : [3, 0, 3, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(194, 1) : [3, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 1],
(195, 1) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(196, 1) : [3, 2, 27, 1, 28, 26, 7, 0, 0, 5, 0, 6, 6, 0],
(197, 1) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 3, 0, 1, 2, 0],
(198, 1) : [3, 2, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(199, 1) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 3, 0, 1, 2, 0],
(200, 1) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(201, 1) : [3, 2, 4, 1, 5, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(201, 2) : [3, 2, 4, 1, 5, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(202, 1) : [3, 2, 27, 1, 28, 26, 7, 0, 0, 5, 0, 6, 6, 0],
(203, 1) : [19, 17, 8, 16, 30, 31, 7, 0, 0, 5, 0, 6, 6, 0],
(203, 2) : [19, 17, 8, 16, 30, 31, 7, 0, 0, 5, 0, 6, 6, 0],
(204, 1) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 3, 0, 1, 2, 0],
(205, 1) : [3, 2, 2, 1, 3, 1, 0, 0, 0, 0, 0, 0, 0, 0],
(206, 1) : [3, 2, 27, 1, 28, 26, 13, 0, 0, 3, 0, 1, 2, 0],
(207, 1) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(208, 1) : [3, 2, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(209, 1) : [3, 2, 27, 1, 28, 26, 7, 0, 0, 5, 0, 6, 6, 0],
(210, 1) : [19, 17, 27, 16, 28, 26, 7, 0, 0, 5, 0, 6, 6, 0],
(211, 1) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 3, 0, 1, 2, 0],
(212, 1) : [19, 17, 0, 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(213, 1) : [19, 17, 0, 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(214, 1) : [19, 17, 4, 16, 5, 6, 13, 0, 0, 3, 0, 1, 2, 0],
(215, 1) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(216, 1) : [3, 2, 27, 1, 28, 26, 7, 0, 0, 5, 0, 6, 6, 0],
(217, 1) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 3, 0, 1, 2, 0],
(218, 1) : [3, 2, 0, 1, 0, 0, 0, 0, 0, 3, 0, 1, 2, 1],
(219, 1) : [3, 2, 27, 1, 28, 26, 7, 0, 0, 28, 0, 26, 26, 1],
(220, 1) : [19, 17, 4, 16, 5, 6, 13, 0, 12, 12, 0, 25, 23, 12],
(221, 1) : [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
(222, 1) : [3, 2, 4, 1, 5, 6, 0, 0, 0, 3, 0, 1, 2, 1],
(222, 2) : [3, 2, 4, 1, 5, 6, 0, 0, 0, 3, 0, 1, 2, 1],
(223, 1) : [3, 2, 0, 1, 0, 0, 0, 0, 0, 3, 0, 1, 2, 1],
(224, 1) : [3, 2, 4, 1, 5, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(224, 2) : [3, 2, 4, 1, 5, 6, 0, 0, 0, 0, 0, 0, 0, 0],
(225, 1) : [3, 2, 27, 1, 28, 26, 7, 0, 0, 5, 0, 6, 6, 0],
(226, 1) : [3, 2, 27, 1, 28, 26, 7, 0, 0, 28, 0, 26, 26, 1],
(227, 1) : [19, 17, 8, 16, 9, 31, 7, 0, 0, 5, 0, 6, 6, 0],
(227, 2) : [19, 17, 8, 16, 9, 31, 7, 0, 0, 5, 0, 6, 6, 0],
(228, 1) : [19, 17, 8, 16, 9, 31, 7, 0, 0, 28, 0, 26, 26, 1],
(228, 2) : [19, 17, 8, 16, 9, 31, 7, 0, 0, 28, 0, 26, 26, 1],
(229, 1) : [3, 2, 4, 1, 5, 6, 13, 0, 0, 3, 0, 1, 2, 0],
(230, 1) : [19, 17, 27, 16, 28, 26, 13, 0, 0, 12, 0, 25, 23, 12],
}
sg_by_hm = {
'A-1' : (2, 3),
'A1' : (1, 3),
'A112' : (5, 3),
'A112/a' : (15, 3),
'A112/m' : (12, 3),
'A112/n' : (15, 10),
'A11a' : (9, 3),
'A11m' : (8, 3),
'A11n' : (9, 10),
'A12/a1' : (15, 2),
'A12/m1' : (12, 2),
'A12/n1' : (15, 7),
'A121' : (5, 2),
'A1a1' : (9, 2),
'A1m1' : (8, 2),
'A1n1' : (9, 7),
'A222' : (21, 3),
'A2_122' : (20, 3),
'A2_1am' : (36, 4),
'A2_1ma' : (36, 3),
'A2aa' : (37, 3),
'A2mm' : (35, 3),
'Aba2' : (41, 1),
'Abaa' : (68, 5),
'Abm2' : (39, 1),
'Abma' : (64, 3),
'Abmm' : (67, 3),
'Ac2a' : (41, 6),
'Ac2m' : (39, 6),
'Acaa' : (68, 7),
'Acam' : (64, 4),
'Acmm' : (67, 4),
'Aea2' : (41, 1),
'Aem2' : (39, 1),
'Am2a' : (40, 6),
'Am2m' : (38, 6),
'Ama2' : (40, 1),
'Amaa' : (66, 3),
'Amam' : (63, 4),
'Amm2' : (38, 1),
'Amma' : (63, 3),
'Ammm' : (65, 3),
'B-1' : (2, 4),
'B1' : (1, 4),
'B112' : (5, 4),
'B112/b' : (15, 4),
'B112/m' : (12, 4),
'B112/n' : (15, 9),
'B11b' : (9, 4),
'B11m' : (8, 4),
'B11n' : (9, 9),
'B2/b11' : (15, 5),
'B2/m11' : (12, 5),
'B2/n11' : (15, 12),
'B211' : (5, 5),
'B222' : (21, 5),
'B22_12' : (20, 5),
'B2cb' : (41, 3),
'B2cm' : (39, 3),
'B2mb' : (40, 3),
'B2mm' : (38, 3),
'Bb11' : (9, 5),
'Bb2_1m' : (36, 5),
'Bb2b' : (37, 5),
'Bba2' : (41, 2),
'Bbab' : (68, 11),
'Bbcb' : (68, 9),
'Bbcm' : (64, 5),
'Bbm2' : (40, 2),
'Bbmb' : (66, 5),
'Bbmm' : (63, 5),
'Bm11' : (8, 5),
'Bm2_1b' : (36, 6),
'Bm2m' : (35, 5),
'Bma2' : (39, 2),
'Bmab' : (64, 6),
'Bmam' : (67, 6),
'Bmcm' : (67, 5),
'Bmm2' : (38, 2),
'Bmmb' : (63, 6),
'Bmmm' : (65, 5),
'Bn11' : (9, 12),
'C-1' : (2, 2),
'C1' : (1, 2),
'C12/c1' : (15, 1),
'C12/m1' : (12, 1),
'C12/n1' : (15, 8),
'C121' : (5, 1),
'C1c1' : (9, 1),
'C1m1' : (8, 1),
'C1n1' : (9, 8),
'C2' : (5, 1),
'C2/c' : (15, 1),
'C2/c11' : (15, 6),
'C2/m' : (12, 1),
'C2/m11' : (12, 6),
'C2/n11' : (15, 11),
'C211' : (5, 6),
'C222' : (21, 1),
'C222_1' : (20, 1),
'C2cb' : (41, 4),
'C2cm' : (40, 4),
'C2mb' : (39, 4),
'C2mm' : (38, 4),
'Cc' : (9, 1),
'Cc11' : (9, 6),
'Cc2a' : (41, 5),
'Cc2m' : (40, 5),
'Ccc2' : (37, 1),
'Ccca' : (68, 1),
'Cccb' : (68, 3),
'Ccce' : (68, 1),
'Cccm' : (66, 1),
'Ccm2_1' : (36, 2),
'Ccmb' : (64, 2),
'Ccmm' : (63, 2),
'Cm' : (8, 1),
'Cm11' : (8, 6),
'Cm2a' : (39, 5),
'Cm2m' : (38, 5),
'Cmc2_1' : (36, 1),
'Cmca' : (64, 1),
'Cmce' : (64, 1),
'Cmcm' : (63, 1),
'Cmm2' : (35, 1),
'Cmma' : (67, 1),
'Cmmb' : (67, 2),
'Cmme' : (67, 1),
'Cmmm' : (65, 1),
'Cn11' : (9, 11),
'F-1' : (2, 6),
'F-43c' : (219, 1),
'F-43m' : (216, 1),
'F1' : (1, 6),
'F121' : (3, 7),
'F2/d-3' : (203, 1),
'F2/m-3' : (202, 1),
'F222' : (22, 1),
'F23' : (196, 1),
'F2dd' : (43, 3),
'F2mm' : (42, 3),
'F4/m-32/c': (226, 1),
'F4/m-32/m': (225, 1),
'F432' : (209, 1),
'F4_1/d-32/c': (228, 1),
'F4_1/d-32/m': (227, 1),
'F4_132' : (210, 1),
'Fd-3' : (203, 1),
'Fd-3c' : (228, 1),
'Fd-3m' : (227, 1),
'Fd2d' : (43, 5),
'Fdd2' : (43, 1),
'Fddd' : (70, 1),
'Fm-3' : (202, 1),
'Fm-3c' : (226, 1),
'Fm-3m' : (225, 1),
'Fm2m' : (42, 5),
'Fmm2' : (42, 1),
'Fmmm' : (69, 1),
'I-1' : (2, 5),
'I-4' : (82, 1),
'I-42d' : (122, 1),
'I-42m' : (121, 1),
'I-43d' : (220, 1),
'I-43m' : (217, 1),
'I-4c2' : (120, 1),
'I-4m2' : (119, 1),
'I1' : (1, 5),
'I112' : (5, 15),
'I112/a' : (15, 16),
'I112/b' : (15, 15),
'I112/m' : (12, 15),
'I11a' : (9, 16),
'I11b' : (9, 15),
'I11m' : (8, 15),
'I12/a1' : (15, 13),
'I12/c1' : (15, 14),
'I12/m1' : (12, 13),
'I121' : (5, 13),
'I1a1' : (9, 13),
'I1c1' : (9, 14),
'I1m1' : (8, 13),
'I2/b11' : (15, 18),
'I2/c11' : (15, 17),
'I2/m-3' : (204, 1),
'I2/m11' : (12, 17),
'I211' : (5, 17),
'I222' : (23, 1),
'I23' : (197, 1),
'I2_1/a-3': (206, 1),
'I2_1/m2_1/m2_1/a': (74, 1),
'I2_12_12_1': (24, 1),
'I2_13' : (199, 1),
'I2cb' : (45, 3),
'I2cm' : (46, 4),
'I2mb' : (46, 3),
'I2mm' : (44, 3),
'I4' : (79, 1),
'I4/m' : (87, 1),
'I4/m-32/m': (229, 1),
'I4/m2/c2/m': (140, 1),
'I4/m2/m2/m': (139, 1),
'I4/mcm' : (140, 1),
'I4/mmm' : (139, 1),
'I422' : (97, 1),
'I432' : (211, 1),
'I4_1' : (80, 1),
'I4_1/a' : (88, 1),
'I4_1/a-32/d': (230, 1),
'I4_1/a2/c2/d': (142, 1),
'I4_1/a2/m2/d': (141, 1),
'I4_1/acd': (142, 1),
'I4_1/amd': (141, 1),
'I4_122' : (98, 1),
'I4_132' : (214, 1),
'I4_1cd' : (110, 1),
'I4_1md' : (109, 1),
'I4cm' : (108, 1),
'I4mm' : (107, 1),
'Ia-3' : (206, 1),
'Ia-3d' : (230, 1),
'Ib11' : (9, 18),
'Iba2' : (45, 1),
'Ibam' : (72, 1),
'Ibca' : (73, 1),
'Ibm2' : (46, 2),
'Ibmm' : (74, 3),
'Ic11' : (9, 17),
'Ic2a' : (45, 5),
'Ic2m' : (46, 5),
'Icab' : (73, 2),
'Icma' : (72, 5),
'Icmm' : (74, 4),
'Im-3' : (204, 1),
'Im-3m' : (229, 1),
'Im11' : (8, 17),
'Im2a' : (46, 6),
'Im2m' : (44, 5),
'Ima2' : (46, 1),
'Imam' : (74, 6),
'Imcb' : (72, 3),
'Imcm' : (74, 5),
'Imm2' : (44, 1),
'Imma' : (74, 1),
'Immb' : (74, 2),
'Immm' : (71, 1),
'P-1' : (2, 1),
'P-3' : (147, 1),
'P-312/c': (163, 1),
'P-312/m': (162, 1),
'P-31c' : (163, 1),
'P-31m' : (162, 1),
'P-32/c1': (165, 1),
'P-32/m1': (164, 1),
'P-3c1' : (165, 1),
'P-3m1' : (164, 1),
'P-4' : (81, 1),
'P-42_1c': (114, 1),
'P-42_1m': (113, 1),
'P-42c' : (112, 1),
'P-42m' : (111, 1),
'P-43m' : (215, 1),
'P-43n' : (218, 1),
'P-4b2' : (117, 1),
'P-4c2' : (116, 1),
'P-4m2' : (115, 1),
'P-4n2' : (118, 1),
'P-6' : (174, 1),
'P-62c' : (190, 1),
'P-62m' : (189, 1),
'P-6c2' : (188, 1),
'P-6m2' : (187, 1),
'P1' : (1, 1),
'P112' : (3, 3),
'P112/a' : (13, 3),
'P112/b' : (13, 4),
'P112/m' : (10, 3),
'P112/n' : (13, 9),
'P112_1' : (4, 3),
'P112_1/a': (14, 3),
'P112_1/b': (14, 4),
'P112_1/m': (11, 3),
'P112_1/n': (14, 9),
'P11a' : (7, 3),
'P11b' : (7, 4),
'P11m' : (6, 3),
'P11n' : (7, 9),
'P12/a1' : (13, 2),
'P12/c1' : (13, 1),
'P12/m1' : (10, 1),
'P12/n1' : (13, 7),
'P121' : (3, 1),
'P12_1/a1': (14, 2),
'P12_1/c1': (14, 1),
'P12_1/m1': (11, 1),
'P12_1/n1': (14, 7),
'P12_11' : (4, 1),
'P1a1' : (7, 2),
'P1c1' : (7, 1),
'P1m1' : (6, 1),
'P1n1' : (7, 7),
'P2' : (3, 1),
'P2/b11' : (13, 5),
'P2/c' : (13, 1),
'P2/c11' : (13, 6),
'P2/m' : (10, 1),
'P2/m-3' : (200, 1),
'P2/m11' : (10, 5),
'P2/n-3' : (201, 1),
'P2/n11' : (13, 11),
'P211' : (3, 5),
'P222' : (16, 1),
'P222_1' : (17, 1),
'P22_12' : (17, 5),
'P22_12_1': (18, 3),
'P23' : (195, 1),
'P2_1' : (4, 1),
'P2_1/a-3': (205, 1),
'P2_1/b11': (14, 5),
'P2_1/c' : (14, 1),
'P2_1/c11': (14, 6),
'P2_1/m' : (11, 1),
'P2_1/m11': (11, 5),
'P2_1/n11': (14, 11),
'P2_111' : (4, 5),
'P2_122' : (17, 3),
'P2_122_1': (18, 5),
'P2_12_12': (18, 1),
'P2_12_12_1': (19, 1),
'P2_13' : (198, 1),
'P2_1ab' : (29, 3),
'P2_1am' : (26, 4),
'P2_1ca' : (29, 4),
'P2_1cn' : (33, 4),
'P2_1ma' : (26, 3),
'P2_1mn' : (31, 3),
'P2_1nb' : (33, 3),
'P2_1nm' : (31, 4),
'P2aa' : (27, 3),
'P2an' : (30, 4),
'P2cb' : (32, 3),
'P2cm' : (28, 4),
'P2mb' : (28, 3),
'P2mm' : (25, 3),
'P2na' : (30, 3),
'P2nn' : (34, 3),
'P3' : (143, 1),
'P312' : (149, 1),
'P31c' : (159, 1),
'P31m' : (157, 1),
'P321' : (150, 1),
'P3_1' : (144, 1),
'P3_112' : (151, 1),
'P3_121' : (152, 1),
'P3_2' : (145, 1),
'P3_212' : (153, 1),
'P3_221' : (154, 1),
'P3c1' : (158, 1),
'P3m1' : (156, 1),
'P4' : (75, 1),
'P4/m' : (83, 1),
'P4/m-32/m': (221, 1),
'P4/m2/c2/c': (124, 1),
'P4/m2/m2/m': (123, 1),
'P4/m2_1/b2/m': (127, 1),
'P4/m2_1/n2/c': (128, 1),
'P4/mbm' : (127, 1),
'P4/mcc' : (124, 1),
'P4/mmm' : (123, 1),
'P4/mnc' : (128, 1),
'P4/n' : (85, 1),
'P4/n-32/n': (222, 1),
'P4/n2/b2/m': (125, 1),
'P4/n2/n2/c': (126, 1),
'P4/n2_1/c2/c': (130, 1),
'P4/n2_1/m2/m': (129, 1),
'P4/nbm' : (125, 1),
'P4/ncc' : (130, 1),
'P4/nmm' : (129, 1),
'P4/nnc' : (126, 1),
'P422' : (89, 1),
'P42_12' : (90, 1),
'P432' : (207, 1),
'P4_1' : (76, 1),
'P4_122' : (91, 1),
'P4_12_12': (92, 1),
'P4_132' : (213, 1),
'P4_2' : (77, 1),
'P4_2/m' : (84, 1),
'P4_2/m-32/n': (223, 1),
'P4_2/m2/c2/m': (132, 1),
'P4_2/m2/m2/c': (131, 1),
'P4_2/m2_1/b2/c': (135, 1),
'P4_2/m2_1/n2/m': (136, 1),
'P4_2/mbc': (135, 1),
'P4_2/mcm': (132, 1),
'P4_2/mmc': (131, 1),
'P4_2/mnm': (136, 1),
'P4_2/n' : (86, 1),
'P4_2/n-32/m': (224, 1),
'P4_2/n2/b2/c': (133, 1),
'P4_2/n2/n2/m': (134, 1),
'P4_2/n2_1/c2/m': (138, 1),
'P4_2/n2_1/m2/c': (137, 1),
'P4_2/nbc': (133, 1),
'P4_2/ncm': (138, 1),
'P4_2/nmc': (137, 1),
'P4_2/nnm': (134, 1),
'P4_222' : (93, 1),
'P4_22_12': (94, 1),
'P4_232' : (208, 1),
'P4_2bc' : (106, 1),
'P4_2cm' : (101, 1),
'P4_2mc' : (105, 1),
'P4_2nm' : (102, 1),
'P4_3' : (78, 1),
'P4_322' : (95, 1),
'P4_32_12': (96, 1),
'P4_332' : (212, 1),
'P4bm' : (100, 1),
'P4cc' : (103, 1),
'P4mm' : (99, 1),
'P4nc' : (104, 1),
'P6' : (168, 1),
'P6/m' : (175, 1),
'P6/m2/c2/c': (192, 1),
'P6/m2/m2/m': (191, 1),
'P6/mcc' : (192, 1),
'P6/mmm' : (191, 1),
'P622' : (177, 1),
'P6_1' : (169, 1),
'P6_122' : (178, 1),
'P6_2' : (171, 1),
'P6_222' : (180, 1),
'P6_3' : (173, 1),
'P6_3/m' : (176, 1),
'P6_3/m2/c2/m': (193, 1),
'P6_3/m2/m2/c': (194, 1),
'P6_3/mcm': (193, 1),
'P6_3/mmc': (194, 1),
'P6_322' : (182, 1),
'P6_3cm' : (185, 1),
'P6_3mc' : (186, 1),
'P6_4' : (172, 1),
'P6_422' : (181, 1),
'P6_5' : (170, 1),
'P6_522' : (179, 1),
'P6cc' : (184, 1),
'P6mm' : (183, 1),
'Pa-3' : (205, 1),
'Pb11' : (7, 5),
'Pb2_1a' : (29, 6),
'Pb2_1m' : (26, 5),
'Pb2b' : (27, 5),
'Pb2n' : (30, 5),
'Pba2' : (32, 1),
'Pbaa' : (54, 3),
'Pbab' : (54, 6),
'Pbam' : (55, 1),
'Pban' : (50, 1),
'Pbc2_1' : (29, 2),
'Pbca' : (61, 1),
'Pbcb' : (54, 5),
'Pbcm' : (57, 1),
'Pbcn' : (60, 1),
'Pbm2' : (28, 2),
'Pbma' : (57, 5),
'Pbmb' : (49, 5),
'Pbmm' : (51, 3),
'Pbmn' : (53, 3),
'Pbn2_1' : (33, 2),
'Pbna' : (60, 5),
'Pbnb' : (56, 5),
'Pbnm' : (62, 3),
'Pbnn' : (52, 3),
'Pc' : (7, 1),
'Pc11' : (7, 6),
'Pc2_1b' : (29, 5),
'Pc2_1n' : (33, 5),
'Pc2a' : (32, 5),
'Pc2m' : (28, 5),
'Pca2_1' : (29, 1),
'Pcaa' : (54, 4),
'Pcab' : (61, 2),
'Pcam' : (57, 2),
'Pcan' : (60, 2),
'Pcc2' : (27, 1),
'Pcca' : (54, 1),
'Pccb' : (54, 2),
'Pccm' : (49, 1),
'Pccn' : (56, 1),
'Pcm2_1' : (26, 2),
'Pcma' : (55, 5),
'Pcmb' : (57, 6),
'Pcmm' : (51, 4),
'Pcmn' : (62, 4),
'Pcn2' : (30, 2),
'Pcna' : (50, 9),
'Pcnb' : (60, 6),
'Pcnm' : (53, 4),
'Pcnn' : (52, 4),
'Pm' : (6, 1),
'Pm-3' : (200, 1),
'Pm-3m' : (221, 1),
'Pm-3n' : (223, 1),
'Pm11' : (6, 5),
'Pm2_1b' : (26, 6),
'Pm2_1n' : (31, 6),
'Pm2a' : (28, 6),
'Pm2m' : (25, 5),
'Pma2' : (28, 1),
'Pmaa' : (49, 3),
'Pmab' : (57, 4),
'Pmam' : (51, 6),
'Pman' : (53, 6),
'Pmc2_1' : (26, 1),
'Pmca' : (57, 3),
'Pmcb' : (55, 3),
'Pmcm' : (51, 5),
'Pmcn' : (62, 5),
'Pmm2' : (25, 1),
'Pmma' : (51, 1),
'Pmmb' : (51, 2),
'Pmmm' : (47, 1),
'Pmmn' : (59, 1),
'Pmn2_1' : (31, 1),
'Pmna' : (53, 1),
'Pmnb' : (62, 2),
'Pmnm' : (59, 9),
'Pmnn' : (58, 3),
'Pn-3' : (201, 1),
'Pn-3m' : (224, 1),
'Pn-3n' : (222, 1),
'Pn11' : (7, 11),
'Pn2_1a' : (33, 6),
'Pn2_1m' : (31, 5),
'Pn2b' : (30, 6),
'Pn2n' : (34, 5),
'Pna2_1' : (33, 1),
'Pnaa' : (56, 3),
'Pnab' : (60, 4),
'Pnam' : (62, 6),
'Pnan' : (52, 6),
'Pnc2' : (30, 1),
'Pnca' : (60, 3),
'Pncb' : (50, 5),
'Pncm' : (53, 5),
'Pncn' : (52, 5),
'Pnm2_1' : (31, 2),
'Pnma' : (62, 1),
'Pnmb' : (53, 2),
'Pnmm' : (59, 5),
'Pnmn' : (58, 5),
'Pnn2' : (34, 1),
'Pnna' : (52, 1),
'Pnnb' : (52, 2),
'Pnnm' : (58, 1),
'Pnnn' : (48, 1),
'R-3' : (148, 1),
'R-32/c' : (167, 1),
'R-32/m' : (166, 1),
'R-3c' : (167, 1),
'R-3m' : (166, 1),
'R3' : (146, 1),
'R32' : (155, 1),
'R3c' : (161, 1),
'R3m' : (160, 1),
}
| 43.00939
| 79
| 0.321518
| 14,983
| 64,127
| 1.363679
| 0.054862
| 0.545027
| 0.686717
| 0.75881
| 0.678641
| 0.659505
| 0.641934
| 0.635718
| 0.63102
| 0.621672
| 0
| 0.410504
| 0.364885
| 64,127
| 1,490
| 80
| 43.038255
| 0.091166
| 0.025153
| 0
| 0.005559
| 0
| 0
| 0.049183
| 0
| 0
| 0
| 0
| 0
| 0.000695
| 1
| 0.002085
| false
| 0
| 0.000695
| 0
| 0.026407
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0a0575f78d502bd62aaf392832829cd6f870513b
| 2,388
|
py
|
Python
|
data2sql/data2sql.py
|
rongxinyin/pge-adr
|
db4b993a3e634bbb9d59c319eeab4058ec9935e7
|
[
"BSD-2-Clause"
] | 1
|
2019-02-10T05:53:11.000Z
|
2019-02-10T05:53:11.000Z
|
data2sql/data2sql.py
|
rongxinyin/pge-adr
|
db4b993a3e634bbb9d59c319eeab4058ec9935e7
|
[
"BSD-2-Clause"
] | null | null | null |
data2sql/data2sql.py
|
rongxinyin/pge-adr
|
db4b993a3e634bbb9d59c319eeab4058ec9935e7
|
[
"BSD-2-Clause"
] | null | null | null |
from __future__ import division
import os.path
import pandas as pd
import numpy as np
import csv
import glob
#import matplotlib.pyplot as plt
#import seaborn as sns
#import datetime
customers = pd.read_csv('../../pge/D1977customers.csv',delimiter=',')
df_said = customers[['SAID','ZIPCODE','sublap']]
count = 0
for part in glob.glob('../../pge/D19771/part*'):
df = pd.read_csv(part,delimiter=',')
df = df.rename(columns = {'SA':'SAID'})
df = pd.merge(df, df_said, on = ['SAID'])
for zipcode in list(set(df.ZIPCODE)):
df_zipcode = df.loc[df.ZIPCODE == zipcode,]
with open('../../pge/D1977ZipCode/'+str(zipcode)+'.csv', 'a') as f:
df_zipcode.to_csv(f, header=False)
count += 1
print 'Processed '+ "{0:0f}%".format(count/250 * 100) + ' of the total data in D197771'
count = 0
for part in glob.glob('../../pge/D19772/part*'):
df = pd.read_csv(part,delimiter=',')
df = df.rename(columns = {'SA':'SAID'})
df = pd.merge(df, df_said, on = ['SAID'])
for zipcode in list(set(df.ZIPCODE)):
df_zipcode = df.loc[df.ZIPCODE == zipcode,]
with open('../../pge/D1977ZipCode/'+str(zipcode)+'.csv', 'a') as f:
df_zipcode.to_csv(f, header=False)
count += 1
print 'Processed '+ "{0:0f}%".format(count/250 * 100) + ' of the total data in D197772'
count = 0
for part in glob.glob('../../pge/D19773/part*'):
df = pd.read_csv(part,delimiter=',')
df = df.rename(columns = {'SA':'SAID'})
df = pd.merge(df, df_said, on = ['SAID'])
for zipcode in list(set(df.ZIPCODE)):
df_zipcode = df.loc[df.ZIPCODE == zipcode,]
with open('../../pge/D1977ZipCode/'+str(zipcode)+'.csv', 'a') as f:
df_zipcode.to_csv(f, header=False)
count += 1
print 'Processed '+ "{0:0f}%".format(count/250 * 100) + ' of the total data in D197773'
count = 0
for part in glob.glob('../../pge/D19774/part*'):
df = pd.read_csv(part,delimiter=',')
df = df.rename(columns = {'SA':'SAID'})
df = pd.merge(df, df_said, on = ['SAID'])
for zipcode in list(set(df.ZIPCODE)):
df_zipcode = df.loc[df.ZIPCODE == zipcode,]
with open('../../pge/D1977ZipCode/'+str(zipcode)+'.csv', 'a') as f:
df_zipcode.to_csv(f, header=False)
count += 1
print 'Processed '+ "{0:0f}%".format(count/60 * 100) + ' of the total data in D197774'
| 35.117647
| 91
| 0.595059
| 352
| 2,388
| 3.963068
| 0.198864
| 0.103226
| 0.063082
| 0.037276
| 0.800717
| 0.800717
| 0.787097
| 0.787097
| 0.712545
| 0.712545
| 0
| 0.054382
| 0.206868
| 2,388
| 68
| 92
| 35.117647
| 0.682154
| 0.028057
| 0
| 0.692308
| 0
| 0
| 0.204487
| 0.089733
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.115385
| null | null | 0.076923
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6a65cb420573f08e1a98bb4254983e24f68491e9
| 2,815
|
py
|
Python
|
tests_project/homepage/tests/test_router.py
|
wynnw/django-mako-plus
|
8a33eb3911fc84ddddd590152f475fd78c6a501f
|
[
"Apache-2.0"
] | 79
|
2015-01-21T23:29:16.000Z
|
2021-08-22T03:38:20.000Z
|
tests_project/homepage/tests/test_router.py
|
wynnw/django-mako-plus
|
8a33eb3911fc84ddddd590152f475fd78c6a501f
|
[
"Apache-2.0"
] | 34
|
2015-01-08T03:11:07.000Z
|
2021-09-07T15:04:43.000Z
|
tests_project/homepage/tests/test_router.py
|
wynnw/django-mako-plus
|
8a33eb3911fc84ddddd590152f475fd78c6a501f
|
[
"Apache-2.0"
] | 23
|
2015-01-08T03:11:26.000Z
|
2021-05-22T11:12:24.000Z
|
from django.test import TestCase
class Tester(TestCase):
def test_view_function_get(self):
# GET method
resp = self.client.get('/homepage/index.basic/1/2/3/')
self.assertEqual(resp.status_code, 200)
req = resp.wsgi_request
self.assertEqual(req.dmp.urlparams[0], '1')
self.assertEqual(req.dmp.urlparams[1], '2')
self.assertEqual(req.dmp.urlparams[2], '3')
def test_view_function_post(self):
# POST method
resp = self.client.post('/homepage/index.basic/1/2/3/')
self.assertEqual(resp.status_code, 200)
req = resp.wsgi_request
self.assertEqual(req.dmp.urlparams[0], '1')
self.assertEqual(req.dmp.urlparams[1], '2')
self.assertEqual(req.dmp.urlparams[2], '3')
def test_does_not_exist(self):
resp = self.client.get('/homepage/index.does_not_exist/1/2/3/')
self.assertEqual(resp.status_code, 404)
def test_bad_response(self):
resp = self.client.get('/homepage/index.bad_response/1/2/3/')
self.assertEqual(resp.status_code, 500)
def test_class_based_get(self):
# GET method
resp = self.client.get('/homepage/index.class_based/1/2/3/')
self.assertEqual(resp.status_code, 200)
req = resp.wsgi_request
self.assertEqual(req.dmp.urlparams[0], '1')
self.assertEqual(req.dmp.urlparams[1], '2')
self.assertEqual(req.dmp.urlparams[2], '3')
def test_class_based_post(self):
# POST method
resp = self.client.post('/homepage/index.class_based/1/2/3/')
self.assertEqual(resp.status_code, 200)
req = resp.wsgi_request
self.assertEqual(req.dmp.urlparams[0], '1')
self.assertEqual(req.dmp.urlparams[1], '2')
self.assertEqual(req.dmp.urlparams[2], '3')
def test_class_based_invalid(self):
# PUT method (not defined in class)
resp = self.client.put('/homepage/index.class_based/1/2/3/')
self.assertEqual(resp.status_code, 405) # method not allowed
def test_class_based_decorated(self):
# GET method
resp = self.client.get('/homepage/index.class_based_decorated/1/2/3/')
self.assertEqual(resp.status_code, 200)
req = resp.wsgi_request
self.assertEqual(req.dmp.urlparams[0], '1')
self.assertEqual(req.dmp.urlparams[1], '2')
self.assertEqual(req.dmp.urlparams[2], '3')
def test_class_based_argdecorated(self):
# GET method
resp = self.client.get('/homepage/index.class_based_argdecorated/1/2/3/')
self.assertEqual(resp.status_code, 200)
req = resp.wsgi_request
self.assertEqual(req.dmp.urlparams[0], '1')
self.assertEqual(req.dmp.urlparams[1], '2')
self.assertEqual(req.dmp.urlparams[2], '3')
| 34.329268
| 81
| 0.642629
| 386
| 2,815
| 4.554404
| 0.124352
| 0.230375
| 0.1843
| 0.215017
| 0.837315
| 0.837315
| 0.837315
| 0.798635
| 0.76223
| 0.76223
| 0
| 0.040669
| 0.213854
| 2,815
| 81
| 82
| 34.753086
| 0.753728
| 0.042629
| 0
| 0.566038
| 0
| 0
| 0.12621
| 0.119509
| 0
| 0
| 0
| 0
| 0.509434
| 1
| 0.169811
| false
| 0
| 0.018868
| 0
| 0.207547
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
6a88e3f755607cacf47415ebe48443ebb0d82f59
| 19,041
|
py
|
Python
|
Toolkits/Discovery/meta/searx/tests/unit/engines/test_btdigg.py
|
roscopecoltran/SniperKit-Core
|
4600dffe1cddff438b948b6c22f586d052971e04
|
[
"MIT"
] | 4
|
2018-09-07T15:35:24.000Z
|
2019-03-27T09:48:12.000Z
|
Toolkits/Discovery/meta/searx/tests/unit/engines/test_btdigg.py
|
roscopecoltran/SniperKit-Core
|
4600dffe1cddff438b948b6c22f586d052971e04
|
[
"MIT"
] | 371
|
2020-03-04T21:51:56.000Z
|
2022-03-31T20:59:11.000Z
|
Toolkits/Discovery/meta/searx/tests/unit/engines/test_btdigg.py
|
roscopecoltran/SniperKit-Core
|
4600dffe1cddff438b948b6c22f586d052971e04
|
[
"MIT"
] | 3
|
2019-06-18T19:57:17.000Z
|
2020-11-06T03:55:08.000Z
|
# -*- coding: utf-8 -*-
from collections import defaultdict
import mock
from searx.engines import btdigg
from searx.testing import SearxTestCase
class TestBtdiggEngine(SearxTestCase):
def test_request(self):
query = 'test_query'
dicto = defaultdict(dict)
dicto['pageno'] = 0
params = btdigg.request(query, dicto)
self.assertIn('url', params)
self.assertIn(query, params['url'])
self.assertIn('btdigg.org', params['url'])
def test_response(self):
self.assertRaises(AttributeError, btdigg.response, None)
self.assertRaises(AttributeError, btdigg.response, [])
self.assertRaises(AttributeError, btdigg.response, '')
self.assertRaises(AttributeError, btdigg.response, '[]')
response = mock.Mock(text='<html></html>')
self.assertEqual(btdigg.response(response), [])
html = u"""
<div id="search_res">
<table>
<tr>
<td class="idx">1</td>
<td>
<table class="torrent_name_tbl">
<tr>
<td class="torrent_name">
<a href="/url">Should be the title</a>
</td>
</tr>
</table>
<table class="torrent_name_tbl">
<tr>
<td class="ttth">
<a onclick="fclck(this.href)" href="magnet:?xt=urn:btih:magnet&dn=Test"
title="Télécharger des liens Magnet">[magnet]</a>
</td>
<td class="ttth">
<a href="https://btcloud.io/manager?cmd=add&info_hash=hash"
target="_blank" title="Ajouter à BTCloud">[cloud]</a>
</td>
<td>
<span class="attr_name">Taille:</span>
<span class="attr_val">8 B</span>
</td>
<td>
<span class="attr_name">Fichiers:</span>
<span class="attr_val">710</span>
</td>
<td>
<span class="attr_name">Téléchargements:</span>
<span class="attr_val">5</span>
</td>
<td>
<span class="attr_name">Temps:</span>
<span class="attr_val">417.8 jours</span>
</td>
<td>
<span class="attr_name">Dernière mise à jour:</span>
<span class="attr_val">5.3 jours</span>
</td>
<td>
<span class="attr_name">Faux:</span>
<span class="attr_val">Aucun</span>
</td>
</tr>
</table>
<pre class="snippet">
Content
</pre>
</td>
</tr>
</table>
</div>
"""
response = mock.Mock(text=html.encode('utf-8'))
results = btdigg.response(response)
self.assertEqual(type(results), list)
self.assertEqual(len(results), 1)
self.assertEqual(results[0]['title'], 'Should be the title')
self.assertEqual(results[0]['url'], 'https://btdigg.org/url')
self.assertEqual(results[0]['content'], 'Content')
self.assertEqual(results[0]['seed'], 5)
self.assertEqual(results[0]['leech'], 0)
self.assertEqual(results[0]['filesize'], 8)
self.assertEqual(results[0]['files'], 710)
self.assertEqual(results[0]['magnetlink'], 'magnet:?xt=urn:btih:magnet&dn=Test')
html = """
<div id="search_res">
<table>
</table>
</div>
"""
response = mock.Mock(text=html.encode('utf-8'))
results = btdigg.response(response)
self.assertEqual(type(results), list)
self.assertEqual(len(results), 0)
html = u"""
<div id="search_res">
<table>
<tr>
<td class="idx">1</td>
<td>
<table class="torrent_name_tbl">
<tr>
<td class="torrent_name">
<a href="/url">Should be the title</a>
</td>
</tr>
</table>
<table class="torrent_name_tbl">
<tr>
<td class="ttth">
<a onclick="fclck(this.href)" href="magnet:?xt=urn:btih:magnet&dn=Test"
title="Télécharger des liens Magnet">[magnet]</a>
</td>
<td class="ttth">
<a href="https://btcloud.io/manager?cmd=add&info_hash=hash"
target="_blank" title="Ajouter à BTCloud">[cloud]</a>
</td>
<td>
<span class="attr_name">Taille:</span>
<span class="attr_val">1 KB</span>
</td>
<td>
<span class="attr_name">Fichiers:</span>
<span class="attr_val">710</span>
</td>
<td>
<span class="attr_name">Téléchargements:</span>
<span class="attr_val">5</span>
</td>
<td>
<span class="attr_name">Temps:</span>
<span class="attr_val">417.8 jours</span>
</td>
<td>
<span class="attr_name">Dernière mise à jour:</span>
<span class="attr_val">5.3 jours</span>
</td>
<td>
<span class="attr_name">Faux:</span>
<span class="attr_val">Aucun</span>
</td>
</tr>
</table>
<pre class="snippet">
Content
</pre>
</td>
</tr>
<tr>
<td class="idx">1</td>
<td>
<table class="torrent_name_tbl">
<tr>
<td class="torrent_name">
<a href="/url">Should be the title</a>
</td>
</tr>
</table>
<table class="torrent_name_tbl">
<tr>
<td class="ttth">
<a onclick="fclck(this.href)" href="magnet:?xt=urn:btih:magnet&dn=Test"
title="Télécharger des liens Magnet">[magnet]</a>
</td>
<td class="ttth">
<a href="https://btcloud.io/manager?cmd=add&info_hash=hash"
target="_blank" title="Ajouter à BTCloud">[cloud]</a>
</td>
<td>
<span class="attr_name">Taille:</span>
<span class="attr_val">1 MB</span>
</td>
<td>
<span class="attr_name">Fichiers:</span>
<span class="attr_val">a</span>
</td>
<td>
<span class="attr_name">Téléchargements:</span>
<span class="attr_val">4</span>
</td>
<td>
<span class="attr_name">Temps:</span>
<span class="attr_val">417.8 jours</span>
</td>
<td>
<span class="attr_name">Dernière mise à jour:</span>
<span class="attr_val">5.3 jours</span>
</td>
<td>
<span class="attr_name">Faux:</span>
<span class="attr_val">Aucun</span>
</td>
</tr>
</table>
<pre class="snippet">
Content
</pre>
</td>
</tr>
<tr>
<td class="idx">1</td>
<td>
<table class="torrent_name_tbl">
<tr>
<td class="torrent_name">
<a href="/url">Should be the title</a>
</td>
</tr>
</table>
<table class="torrent_name_tbl">
<tr>
<td class="ttth">
<a onclick="fclck(this.href)" href="magnet:?xt=urn:btih:magnet&dn=Test"
title="Télécharger des liens Magnet">[magnet]</a>
</td>
<td class="ttth">
<a href="https://btcloud.io/manager?cmd=add&info_hash=hash"
target="_blank" title="Ajouter à BTCloud">[cloud]</a>
</td>
<td>
<span class="attr_name">Taille:</span>
<span class="attr_val">1 GB</span>
</td>
<td>
<span class="attr_name">Fichiers:</span>
<span class="attr_val">710</span>
</td>
<td>
<span class="attr_name">Téléchargements:</span>
<span class="attr_val">3</span>
</td>
<td>
<span class="attr_name">Temps:</span>
<span class="attr_val">417.8 jours</span>
</td>
<td>
<span class="attr_name">Dernière mise à jour:</span>
<span class="attr_val">5.3 jours</span>
</td>
<td>
<span class="attr_name">Faux:</span>
<span class="attr_val">Aucun</span>
</td>
</tr>
</table>
<pre class="snippet">
Content
</pre>
</td>
</tr>
<tr>
<td class="idx">1</td>
<td>
<table class="torrent_name_tbl">
<tr>
<td class="torrent_name">
<a href="/url">Should be the title</a>
</td>
</tr>
</table>
<table class="torrent_name_tbl">
<tr>
<td class="ttth">
<a onclick="fclck(this.href)" href="magnet:?xt=urn:btih:magnet&dn=Test"
title="Télécharger des liens Magnet">[magnet]</a>
</td>
<td class="ttth">
<a href="https://btcloud.io/manager?cmd=add&info_hash=hash"
target="_blank" title="Ajouter à BTCloud">[cloud]</a>
</td>
<td>
<span class="attr_name">Taille:</span>
<span class="attr_val">1 TB</span>
</td>
<td>
<span class="attr_name">Fichiers:</span>
<span class="attr_val">710</span>
</td>
<td>
<span class="attr_name">Téléchargements:</span>
<span class="attr_val">2</span>
</td>
<td>
<span class="attr_name">Temps:</span>
<span class="attr_val">417.8 jours</span>
</td>
<td>
<span class="attr_name">Dernière mise à jour:</span>
<span class="attr_val">5.3 jours</span>
</td>
<td>
<span class="attr_name">Faux:</span>
<span class="attr_val">Aucun</span>
</td>
</tr>
</table>
<pre class="snippet">
Content
</pre>
</td>
</tr>
<tr>
<td class="idx">1</td>
<td>
<table class="torrent_name_tbl">
<tr>
<td class="torrent_name">
<a href="/url">Should be the title</a>
</td>
</tr>
</table>
<table class="torrent_name_tbl">
<tr>
<td class="ttth">
<a onclick="fclck(this.href)" href="magnet:?xt=urn:btih:magnet&dn=Test"
title="Télécharger des liens Magnet">[magnet]</a>
</td>
<td class="ttth">
<a href="https://btcloud.io/manager?cmd=add&info_hash=hash"
target="_blank" title="Ajouter à BTCloud">[cloud]</a>
</td>
<td>
<span class="attr_name">Taille:</span>
<span class="attr_val">a TB</span>
</td>
<td>
<span class="attr_name">Fichiers:</span>
<span class="attr_val">710</span>
</td>
<td>
<span class="attr_name">Téléchargements:</span>
<span class="attr_val">z</span>
</td>
<td>
<span class="attr_name">Temps:</span>
<span class="attr_val">417.8 jours</span>
</td>
<td>
<span class="attr_name">Dernière mise à jour:</span>
<span class="attr_val">5.3 jours</span>
</td>
<td>
<span class="attr_name">Faux:</span>
<span class="attr_val">Aucun</span>
</td>
</tr>
</table>
<pre class="snippet">
Content
</pre>
</td>
</tr>
</table>
</div>
"""
response = mock.Mock(text=html.encode('utf-8'))
results = btdigg.response(response)
self.assertEqual(type(results), list)
self.assertEqual(len(results), 5)
self.assertEqual(results[0]['title'], 'Should be the title')
self.assertEqual(results[0]['url'], 'https://btdigg.org/url')
self.assertEqual(results[0]['content'], 'Content')
self.assertEqual(results[0]['seed'], 5)
self.assertEqual(results[0]['leech'], 0)
self.assertEqual(results[0]['files'], 710)
self.assertEqual(results[0]['magnetlink'], 'magnet:?xt=urn:btih:magnet&dn=Test')
self.assertEqual(results[0]['filesize'], 1024)
self.assertEqual(results[1]['filesize'], 1048576)
self.assertEqual(results[2]['filesize'], 1073741824)
self.assertEqual(results[3]['filesize'], 1099511627776)
| 49.457143
| 111
| 0.32971
| 1,455
| 19,041
| 4.232302
| 0.084536
| 0.105229
| 0.151997
| 0.075999
| 0.914095
| 0.893472
| 0.893147
| 0.893147
| 0.893147
| 0.893147
| 0
| 0.01644
| 0.555958
| 19,041
| 384
| 112
| 49.585938
| 0.711886
| 0.001103
| 0
| 0.893333
| 0
| 0.048
| 0.878904
| 0.17068
| 0
| 0
| 0
| 0
| 0.088
| 1
| 0.005333
| false
| 0
| 0.010667
| 0
| 0.018667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
6a905e283fb472b0060229fcb3229c3f2a9cc994
| 1,062
|
py
|
Python
|
python/testData/inspections/PyTypeCheckerInspection/AsyncComprehensionsOverGenerator.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2018-12-29T09:53:39.000Z
|
2018-12-29T09:53:42.000Z
|
python/testData/inspections/PyTypeCheckerInspection/AsyncComprehensionsOverGenerator.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/inspections/PyTypeCheckerInspection/AsyncComprehensionsOverGenerator.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
def gen():
yield 10
async def run():
{i async for i in <warning descr="Expected 'collections.AsyncIterable', got 'Generator[int, Any, None]' instead">gen()</warning>}
[i async for i in <warning descr="Expected 'collections.AsyncIterable', got 'Generator[int, Any, None]' instead">gen()</warning>]
{i: i ** 2 async for i in <warning descr="Expected 'collections.AsyncIterable', got 'Generator[int, Any, None]' instead">gen()</warning>}
(i ** 2 async for i in <warning descr="Expected 'collections.AsyncIterable', got 'Generator[int, Any, None]' instead">gen()</warning>)
list(i async for i in <warning descr="Expected 'collections.AsyncIterable', got 'Generator[int, Any, None]' instead">gen()</warning>)
dataset = {data async for line in <warning descr="Expected 'collections.AsyncIterable', got 'Generator[int, Any, None]' instead">gen()</warning>
async for data in <warning descr="Expected 'collections.AsyncIterable', got 'Generator[int, Any, None]' instead">gen()</warning>
if check(data)}
| 88.5
| 148
| 0.679849
| 140
| 1,062
| 5.157143
| 0.192857
| 0.077562
| 0.135734
| 0.213296
| 0.897507
| 0.897507
| 0.897507
| 0.897507
| 0.897507
| 0.897507
| 0
| 0.004505
| 0.163842
| 1,062
| 12
| 149
| 88.5
| 0.808559
| 0
| 0
| 0
| 0
| 0
| 0.507056
| 0.184384
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6ae9fd768074344fdb535e88e86c03122f60cfc0
| 959
|
py
|
Python
|
proteus/config/__init__.py
|
robertsawko/proteus
|
6f1e4c2ca1af85a906b35a5162430006f0343861
|
[
"NASA-1.3"
] | null | null | null |
proteus/config/__init__.py
|
robertsawko/proteus
|
6f1e4c2ca1af85a906b35a5162430006f0343861
|
[
"NASA-1.3"
] | null | null | null |
proteus/config/__init__.py
|
robertsawko/proteus
|
6f1e4c2ca1af85a906b35a5162430006f0343861
|
[
"NASA-1.3"
] | null | null | null |
import os

# Select the platform-specific Proteus configuration module based on the
# PROTEUS_ARCH environment variable; fall back to the default configuration
# when the variable is unset or matches no known architecture prefix.
# NOTE(review): the star-imports below are Python-2-style implicit relative
# imports (``from garnet import *``) — kept as-is for package compatibility.
_arch = os.environ.get('PROTEUS_ARCH', '')

if _arch.startswith('garnet'):
    from garnet import *
elif _arch.startswith('spirit'):
    from spirit import *
elif _arch.startswith('stampede'):
    from stampede import *
elif _arch.startswith('copper'):
    from copper import *
elif _arch.startswith('lightning'):
    from lightning import *
elif _arch.startswith('viutill'):
    from viutill import *
elif _arch.startswith('tamucluster'):
    from tamucluster import *
elif _arch.startswith('centos'):
    from centos import *
else:
    from default import *
| 45.666667
| 91
| 0.743483
| 134
| 959
| 5.201493
| 0.156716
| 0.252511
| 0.149211
| 0.172166
| 0.731707
| 0.731707
| 0.731707
| 0.731707
| 0.731707
| 0.731707
| 0
| 0
| 0.129301
| 959
| 20
| 92
| 47.95
| 0.834731
| 0
| 0
| 0
| 0
| 0
| 0.261731
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.526316
| 0
| 0.526316
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 10
|
0a806f420f4fb2540feb3cffcc81910a2e11fa8d
| 275
|
py
|
Python
|
src/transforms/mnist_transforms.py
|
AlexeyVatolin/lightning-hydra-template
|
7e9cf910a512297dfb1f8f219227822fe7786f22
|
[
"MIT"
] | 1
|
2021-12-03T02:33:29.000Z
|
2021-12-03T02:33:29.000Z
|
src/transforms/mnist_transforms.py
|
AlexeyVatolin/lightning-hydra-template
|
7e9cf910a512297dfb1f8f219227822fe7786f22
|
[
"MIT"
] | null | null | null |
src/transforms/mnist_transforms.py
|
AlexeyVatolin/lightning-hydra-template
|
7e9cf910a512297dfb1f8f219227822fe7786f22
|
[
"MIT"
] | null | null | null |
from torchvision import transforms

# MNIST normalization statistics (single-channel mean/std); the same values
# were previously duplicated inline in both pipelines below.
_MNIST_MEAN = (0.1307,)
_MNIST_STD = (0.3081,)


def _build_mnist_transform():
    """Return a ToTensor + Normalize pipeline using the MNIST statistics."""
    return transforms.Compose(
        [transforms.ToTensor(), transforms.Normalize(_MNIST_MEAN, _MNIST_STD)]
    )


# Train and test pipelines are currently identical, but remain distinct
# objects so augmentation can later be added to training without touching
# evaluation.
mnist_train_transforms = _build_mnist_transform()
mnist_test_transforms = _build_mnist_transform()
| 25
| 71
| 0.734545
| 30
| 275
| 6.6
| 0.433333
| 0.20202
| 0.272727
| 0.373737
| 0.747475
| 0.747475
| 0.747475
| 0.747475
| 0.747475
| 0.747475
| 0
| 0.081301
| 0.105455
| 275
| 10
| 72
| 27.5
| 0.723577
| 0
| 0
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.142857
| 0
| 0.142857
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0a9175825eb737921b4c7ccbde1fa134cdcf89ae
| 142,449
|
py
|
Python
|
tests/test_edgeql_functions.py
|
TomFaulkner/edgedb
|
911f18468790e27168449f452e3e999bb5a78ee5
|
[
"Apache-2.0"
] | null | null | null |
tests/test_edgeql_functions.py
|
TomFaulkner/edgedb
|
911f18468790e27168449f452e3e999bb5a78ee5
|
[
"Apache-2.0"
] | null | null | null |
tests/test_edgeql_functions.py
|
TomFaulkner/edgedb
|
911f18468790e27168449f452e3e999bb5a78ee5
|
[
"Apache-2.0"
] | null | null | null |
#
# This source file is part of the EdgeDB open source project.
#
# Copyright 2017-present MagicStack Inc. and the EdgeDB authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import decimal
import json
import os.path
import edgedb
from edb.testbase import server as tb
from edb.tools import test
class TestEdgeQLFunctions(tb.QueryTestCase):
SCHEMA = os.path.join(os.path.dirname(__file__), 'schemas',
'issues.esdl')
SETUP = os.path.join(os.path.dirname(__file__), 'schemas',
'issues_setup.edgeql')
    async def test_edgeql_functions_count_01(self):
        # count() over a computed link set must agree with a literal count
        # stored on the same object shape.
        await self.assert_query_result(
            r"""
                WITH
                    x := (
                        # User is simply employed as an object to be augmented
                        SELECT User {
                            count := 4,
                            all_issues := Issue
                        } FILTER .name = 'Elvis'
                    )
                SELECT x.count = count(x.all_issues);
            """,
            [True]
        )
async def test_edgeql_functions_count_02(self):
await self.assert_query_result(
r"""
WITH
x := (
# User is simply employed as an object to be augmented
SELECT User {
count := count(Issue),
all_issues := Issue
} FILTER .name = 'Elvis'
)
SELECT x.count = count(x.all_issues);
""",
[True]
)
async def test_edgeql_functions_count_03(self):
await self.assert_query_result(
r"""
WITH
x := (
# User is simply employed as an object to be augmented
SELECT User {
count := count(<int64>Issue.number),
all_issues := <int64>Issue.number
} FILTER .name = 'Elvis'
)
SELECT x.count = count(x.all_issues);
""",
[True]
)
async def test_edgeql_functions_array_agg_01(self):
await self.assert_query_result(
r'''SELECT array_agg({1, 2, 3});''',
[[1, 2, 3]],
)
await self.assert_query_result(
r'''SELECT array_agg({3, 2, 3});''',
[[3, 2, 3]],
)
await self.assert_query_result(
r'''SELECT array_agg({3, 3, 2});''',
[[3, 3, 2]],
)
async def test_edgeql_functions_array_agg_02(self):
await self.assert_query_result(
r'''SELECT array_agg({1, 2, 3})[0];''',
[1],
)
await self.assert_query_result(
r'''SELECT array_agg({3, 2, 3})[1];''',
[2],
)
await self.assert_query_result(
r'''SELECT array_agg({3, 3, 2})[-1];''',
[2],
)
async def test_edgeql_functions_array_agg_03(self):
await self.assert_query_result(
r'''
WITH x := {3, 1, 2}
SELECT array_agg(x ORDER BY x);
''',
[[1, 2, 3]],
)
await self.assert_query_result(
r'''
WITH x := {3, 1, 2}
SELECT array_agg(x ORDER BY x) = [1, 2, 3];
''',
[True],
)
async def test_edgeql_functions_array_agg_04(self):
await self.assert_query_result(
r'''
WITH x := {3, 1, 2}
SELECT contains(array_agg(x ORDER BY x), 2);
''',
[True],
)
await self.assert_query_result(
r'''
WITH x := {3, 1, 2}
SELECT contains(array_agg(x ORDER BY x), 5);
''',
[False],
)
await self.assert_query_result(
r'''
WITH x := {3, 1, 2}
SELECT contains(array_agg(x ORDER BY x), 5);
''',
[False],
)
async def test_edgeql_functions_array_agg_05(self):
with self.assertRaisesRegex(
edgedb.QueryError,
r'expression returns value of indeterminate type'):
await self.con.execute("""
SELECT array_agg({});
""")
async def test_edgeql_functions_array_agg_06(self):
await self.assert_query_result(
'''SELECT array_agg(<int64>{});''',
[[]],
)
await self.assert_query_result(
'''SELECT array_agg(DISTINCT <int64>{});''',
[[]],
)
async def test_edgeql_functions_array_agg_07(self):
await self.assert_query_result(
r'''
SELECT array_agg((SELECT schema::ObjectType FILTER False));
''',
[[]]
)
await self.assert_query_result(
r'''
SELECT array_agg(
(SELECT schema::ObjectType
FILTER <str>schema::ObjectType.id = '~')
);
''',
[[]]
)
async def test_edgeql_functions_array_agg_08(self):
await self.assert_query_result(
r'''
WITH x := <int64>{}
SELECT array_agg(x);
''',
[[]]
)
await self.assert_query_result(
r'''
WITH x := (SELECT schema::ObjectType FILTER False)
SELECT array_agg(x);
''',
[[]]
)
await self.assert_query_result(
r'''
WITH x := (
SELECT schema::ObjectType
FILTER <str>schema::ObjectType.id = '~'
)
SELECT array_agg(x);
''',
[[]]
)
async def test_edgeql_functions_array_agg_09(self):
await self.assert_query_result(
r"""
WITH MODULE schema
SELECT
ObjectType {
l := array_agg(
ObjectType.properties.name
FILTER
ObjectType.properties.name IN {
'id',
'name'
}
ORDER BY ObjectType.properties.name ASC
)
}
FILTER
ObjectType.name = 'schema::Object';
""",
[{
'l': ['id', 'name']
}]
)
async def test_edgeql_functions_array_agg_10(self):
with self.assertRaisesRegex(
edgedb.UnsupportedFeatureError,
r"nested arrays are not supported"):
await self.con.query(r"""
SELECT array_agg(
[<str>Issue.number, Issue.status.name]
ORDER BY Issue.number);
""")
async def test_edgeql_functions_array_agg_11(self):
await self.assert_query_result(
r"""
SELECT array_agg(
(<str>Issue.number, Issue.status.name)
ORDER BY Issue.number
)[1];
""",
[['2', 'Open']]
)
async def test_edgeql_functions_array_agg_12(self):
await self.assert_query_result(
r'''
SELECT
array_agg(User{name} ORDER BY User.name);
''',
[[{'name': 'Elvis'}, {'name': 'Yury'}]]
)
result = await self.con.query(r'''
SELECT
array_agg(User{name} ORDER BY User.name);
''')
self.assertEqual(result[0][0].name, 'Elvis')
self.assertEqual(result[0][1].name, 'Yury')
async def test_edgeql_functions_array_agg_13(self):
await self.assert_query_result(
r'''
SELECT
Issue {
number,
watchers_array := array_agg(Issue.watchers {name})
}
FILTER
EXISTS Issue.watchers
ORDER BY
Issue.number;
''',
[
{'number': '1', 'watchers_array': [{'name': 'Yury'}]},
{'number': '2', 'watchers_array': [{'name': 'Elvis'}]},
{'number': '3', 'watchers_array': [{'name': 'Elvis'}]}
]
)
async def test_edgeql_functions_array_agg_14(self):
with self.assertRaisesRegex(
edgedb.UnsupportedFeatureError,
r"nested arrays are not supported"):
await self.con.query(r'''
SELECT array_agg(array_agg(User.name));
''')
async def test_edgeql_functions_array_agg_15(self):
await self.assert_query_result(
r'''
SELECT array_agg(
([([User.name],)],) ORDER BY User.name
);
''',
[ # result set
[ # array_agg
[[[['Elvis']]]], [[[['Yury']]]],
]
]
)
async def test_edgeql_functions_array_agg_16(self):
await self.assert_query_result(
r'''
SELECT array_agg( # outer array
( # tuple
array_agg( # array
( # tuple
array_agg(User.name ORDER BY User.name),
)
),
)
);
''',
[ # result set
[ # outer array_agg
[[[['Elvis', 'Yury']]]]
]
]
)
async def test_edgeql_functions_array_agg_17(self):
await self.assert_query_result(
'''SELECT count(array_agg({}))''',
[1],
)
async def test_edgeql_functions_array_agg_18(self):
with self.assertRaisesRegex(
edgedb.QueryError,
r'expression returns value of indeterminate type'):
await self.con.execute(
'''SELECT array_agg({})''',
)
async def test_edgeql_functions_array_agg_19(self):
await self.assert_query_result(
r'''FOR X in {array_agg(0)} UNION (SELECT array_unpack(X));''',
[0],
)
await self.assert_query_result(
r'''
FOR X in {array_agg((0, 1))}
UNION (SELECT array_unpack(X));
''',
[[0, 1]],
)
await self.assert_query_result(
r'''FOR X in {array_agg((0, 1))} UNION (X);''',
[[[0, 1]]],
)
async def test_edgeql_functions_array_agg_20(self):
await self.assert_query_result(
r'''
SELECT Issue { te := array_agg(.time_estimate) };
''',
tb.bag([{"te": [3000]}, {"te": []}, {"te": []}, {"te": []}]),
)
await self.assert_query_result(
r'''
SELECT Issue { te := array_agg(.time_estimate UNION 3000) };
''',
tb.bag(
[{"te": [3000, 3000]}, {"te": [3000]},
{"te": [3000]}, {"te": [3000]}],
)
)
async def test_edgeql_functions_array_unpack_01(self):
await self.assert_query_result(
r'''SELECT [1, 2];''',
[[1, 2]],
)
await self.assert_query_result(
r'''SELECT array_unpack([1, 2]);''',
[1, 2],
)
await self.assert_query_result(
r'''SELECT array_unpack([10, 20]) - 1;''',
[9, 19],
)
async def test_edgeql_functions_array_unpack_02(self):
await self.assert_query_result(
# array_agg and array_unpack are inverses of each other
r'''SELECT array_agg(array_unpack([1, 2, 3])) = [1, 2, 3];''',
[True],
)
await self.assert_query_result(
r'''SELECT array_unpack(array_agg({1, 2, 3}));''',
{1, 2, 3},
)
async def test_edgeql_functions_array_unpack_03(self):
await self.assert_query_result(
r'''
# array_agg and array_unpack are inverses of each other
SELECT array_unpack(array_agg(Issue.number));
''',
{'1', '2', '3', '4'},
)
async def test_edgeql_functions_array_unpack_04(self):
await self.assert_query_result(
r'''
# array_agg and array_unpack are inverses of each other
SELECT array_unpack(array_agg(Issue)){number};
''',
[
{'number': '1'},
{'number': '2'},
{'number': '3'},
{'number': '4'},
],
sort=lambda x: x['number']
)
    async def test_edgeql_functions_array_unpack_05(self):
        # Tuple-element access (.0) must work on the result of unpacking an
        # array of tuples.
        await self.assert_query_result(
            r'''SELECT array_unpack([(1,)]).0;''',
            [1],
        )
async def test_edgeql_functions_array_unpack_06(self):
# We have a special case optimization for "IN array_unpack" so
# it's worth testing it.
await self.assert_query_result(
r'''SELECT 1 IN array_unpack([1]);''',
[True],
)
await self.assert_query_result(
r'''SELECT 2 IN array_unpack([1]);''',
[False],
)
await self.assert_query_result(
r'''SELECT 2 NOT IN array_unpack([1]);''',
[True],
)
await self.assert_query_result(
r'''SELECT 1 IN array_unpack({[1,2,3], [4,5,6]});''',
[True],
)
await self.assert_query_result(
r'''SELECT 0 IN array_unpack({[1,2,3], [4,5,6]});''',
[False],
)
await self.assert_query_result(
r'''SELECT 1 NOT IN array_unpack({[1,2,3], [4,5,6]});''',
[False],
)
await self.assert_query_result(
r'''SELECT 0 NOT IN array_unpack({[1,2,3], [4,5,6]});''',
[True],
)
await self.assert_query_result(
r"""
SELECT ("foo", 1) IN array_unpack([("foo", 1), ("bar", 2)]);
""",
[True],
)
async def test_edgeql_functions_enumerate_01(self):
await self.assert_query_result(
r'''SELECT [10, 20];''',
[[10, 20]],
)
await self.assert_query_result(
r'''SELECT enumerate(array_unpack([10,20]));''',
[[0, 10], [1, 20]],
)
await self.assert_query_result(
r'''SELECT enumerate(array_unpack([10,20])).0 + 100;''',
[100, 101],
)
await self.assert_query_result(
r'''SELECT enumerate(array_unpack([10,20])).1 + 100;''',
[110, 120],
)
await self.assert_query_result(
r'''SELECT enumerate(array_unpack([(1, '2')]))''',
[[0, [1, '2']]],
)
await self.assert_query_result(
r'''SELECT enumerate(array_unpack([(1, '2')])).1.1''',
['2'],
)
async def test_edgeql_functions_enumerate_02(self):
await self.assert_query_result(
r'''SELECT enumerate(array_unpack([(x:=1)])).1;''',
[{"x": 1}],
)
await self.assert_query_result(
r'''SELECT enumerate(array_unpack([(x:=1)])).1.x;''',
[1],
)
await self.assert_query_result(
r'''SELECT enumerate(array_unpack([(x:=(a:=2))])).1;''',
[{"x": {"a": 2}}],
)
await self.assert_query_result(
r'''SELECT enumerate(array_unpack([(x:=(a:=2))])).1.x;''',
[{"a": 2}],
)
await self.assert_query_result(
r'''SELECT enumerate(array_unpack([(x:=(a:=2))])).1.x.a;''',
[2],
)
async def test_edgeql_functions_enumerate_03(self):
await self.assert_query_result(
r'''SELECT enumerate((SELECT User.name ORDER BY User.name));''',
[[0, 'Elvis'], [1, 'Yury']],
)
await self.assert_query_result(
r'''SELECT enumerate({'a', 'b', 'c'});''',
[[0, 'a'], [1, 'b'], [2, 'c']],
)
await self.assert_query_result(
r'''WITH A := {'a', 'b'} SELECT (A, enumerate(A));''',
[['a', [0, 'a']], ['b', [0, 'b']]],
)
await self.assert_query_result(
r'''SELECT enumerate({(1, 2), (3, 4)});''',
[[0, [1, 2]], [1, [3, 4]]],
)
    async def test_edgeql_functions_enumerate_04(self):
        # Casting enumerate() output to <json> must serialize consistently
        # both element-wise (query) and as a whole document (query_json).
        self.assertEqual(
            await self.con.query(
                'select <json>enumerate({(1, 2), (3, 4)})'),
            ['[0, [1, 2]]', '[1, [3, 4]]'])
        self.assertEqual(
            await self.con.query_json(
                'select <json>enumerate({(1, 2), (3, 4)})'),
            '[[0, [1, 2]], [1, [3, 4]]]')
async def test_edgeql_functions_enumerate_05(self):
await self.assert_query_result(
r'''SELECT enumerate(User { name } ORDER BY .name);''',
[[0, {"name": "Elvis"}],
[1, {"name": "Yury"}]],
)
await self.assert_query_result(
r'''SELECT enumerate(User ORDER BY .name).1.name;''',
["Elvis", "Yury"],
)
async def test_edgeql_functions_enumerate_06(self):
await self.assert_query_result(
r'''SELECT enumerate(_gen_series(0, 99) FILTER FALSE);''',
[],
)
async def test_edgeql_functions_enumerate_07(self):
# Check that enumerate of a function works when the tuple type
# appears in the schema (like tuple<int64, int64> does)
await self.assert_query_result(
r'''
WITH Z := enumerate(array_unpack([10, 20])),
Y := enumerate(Z),
SELECT (Y.1.0, Y.1.1) ORDER BY Y.0;
''',
[[0, 10], [1, 20]]
)
async def test_edgeql_functions_enumerate_08(self):
await self.assert_query_result(
r'''
SELECT Issue { te := enumerate(.time_estimate) };
''',
tb.bag(
[{"te": [0, 3000]}, {"te": None}, {"te": None}, {"te": None}]
)
)
await self.assert_query_result(
r'''
SELECT Issue { te := enumerate(.time_estimate UNION 3000) };
''',
tb.bag([
{"te": [[0, 3000], [1, 3000]]},
{"te": [[0, 3000]]},
{"te": [[0, 3000]]},
{"te": [[0, 3000]]}
])
)
async def test_edgeql_functions_array_get_01(self):
await self.assert_query_result(
r'''SELECT array_get([1, 2, 3], 2);''',
[3],
)
await self.assert_query_result(
r'''SELECT array_get([1, 2, 3], -2);''',
[2],
)
await self.assert_query_result(
r'''SELECT array_get([1, 2, 3], 20);''',
[],
)
await self.assert_query_result(
r'''SELECT array_get([1, 2, 3], -20);''',
[],
)
async def test_edgeql_functions_array_get_02(self):
await self.assert_query_result(
r'''
SELECT array_get(array_agg(
Issue.number ORDER BY Issue.number), 2);
''',
['3'],
)
await self.assert_query_result(
r'''
SELECT array_get(array_agg(
Issue.number ORDER BY Issue.number), -2);
''',
['3'],
)
await self.assert_query_result(
r'''SELECT array_get(array_agg(Issue.number), 20);''',
[]
)
await self.assert_query_result(
r'''SELECT array_get(array_agg(Issue.number), -20);''',
[]
)
async def test_edgeql_functions_array_get_03(self):
with self.assertRaisesRegex(
edgedb.QueryError,
r'function "array_get.+" does not exist'):
await self.con.query(r'''
SELECT array_get([1, 2, 3], 2^40);
''')
async def test_edgeql_functions_array_get_04(self):
await self.assert_query_result(
r'''SELECT array_get([1, 2, 3], 0) ?? 42;''',
[1],
)
await self.assert_query_result(
r'''SELECT array_get([1, 2, 3], 0, default := -1) ?? 42;''',
[1],
)
await self.assert_query_result(
r'''SELECT array_get([1, 2, 3], -2) ?? 42;''',
[2],
)
await self.assert_query_result(
r'''SELECT array_get([1, 2, 3], 20) ?? 42;''',
[42],
)
await self.assert_query_result(
r'''SELECT array_get([1, 2, 3], -20) ?? 42;''',
[42],
)
async def test_edgeql_functions_array_get_05(self):
await self.assert_query_result(
r'''SELECT array_get([1, 2, 3], 1, default := 4200) ?? 42;''',
[2],
)
await self.assert_query_result(
r'''SELECT array_get([1, 2, 3], -2, default := 4200) ?? 42;''',
[2],
)
await self.assert_query_result(
r'''SELECT array_get([1, 2, 3], 20, default := 4200) ?? 42;''',
[4200],
)
await self.assert_query_result(
r'''SELECT array_get([1, 2, 3], -20, default := 4200) ?? 42;''',
[4200],
)
async def test_edgeql_functions_array_get_06(self):
await self.assert_query_result(
r'''SELECT array_get([(20,), (30,)], 0);''',
[[20]],
)
await self.assert_query_result(
r'''SELECT array_get([(a:=20), (a:=30)], 1);''',
[{'a': 30}],
)
await self.assert_query_result(
r'''SELECT array_get([(20,), (30,)], 0).0;''',
[20],
)
await self.assert_query_result(
r'''SELECT array_get([(a:=20), (a:=30)], 1).0;''',
[30],
)
await self.assert_query_result(
r'''SELECT array_get([(a:=20, b:=1), (a:=30, b:=2)], 0).a;''',
[20],
)
await self.assert_query_result(
r'''SELECT array_get([(a:=20, b:=1), (a:=30, b:=2)], 1).b;''',
[2],
)
@test.xfail(
"Known collation issue on Heroku Postgres",
unless=os.getenv("EDGEDB_TEST_BACKEND_VENDOR") != "heroku-postgres"
)
async def test_edgeql_functions_re_match_01(self):
await self.assert_query_result(
r'''SELECT re_match('ab', 'AbabaB');''',
[['ab']],
)
await self.assert_query_result(
r'''SELECT re_match('AB', 'AbabaB');''',
[],
)
await self.assert_query_result(
r'''SELECT re_match('(?i)AB', 'AbabaB');''',
[['Ab']],
)
await self.assert_query_result(
r'''SELECT re_match('ac', 'AbabaB');''',
[],
)
await self.assert_query_result(
r'''SELECT EXISTS re_match('ac', 'AbabaB');''',
[False],
)
await self.assert_query_result(
r'''SELECT NOT EXISTS re_match('ac', 'AbabaB');''',
[True],
)
await self.assert_query_result(
r'''SELECT EXISTS re_match('ab', 'AbabaB');''',
[True],
)
await self.assert_query_result(
r'''SELECT NOT EXISTS re_match('ab', 'AbabaB');''',
[False],
)
await self.assert_query_result(
r'''SELECT x := re_match({'(?i)ab', 'a'}, 'AbabaB') ORDER BY x;''',
[['Ab'], ['a']],
)
await self.assert_query_result(
r'''
SELECT x := re_match({'(?i)ab', 'a'}, {'AbabaB', 'qwerty'})
ORDER BY x;
''',
[['Ab'], ['a']],
)
async def test_edgeql_functions_re_match_02(self):
await self.assert_query_result(
r'''
WITH MODULE schema
SELECT x := re_match('(\\w+)::(Link|Property)',
ObjectType.name)
ORDER BY x;
''',
[['schema', 'Link'], ['schema', 'Property']],
)
async def test_edgeql_functions_re_match_03(self):
with self.assertRaisesRegex(
edgedb.InvalidValueError,
"invalid regular expression"
):
await self.con.query(r'''
select re_match('\\', 'asdf')
''')
@test.xfail(
"Known collation issue on Heroku Postgres",
unless=os.getenv("EDGEDB_TEST_BACKEND_VENDOR") != "heroku-postgres"
)
async def test_edgeql_functions_re_match_all_01(self):
await self.assert_query_result(
r'''SELECT re_match_all('ab', 'AbabaB');''',
[['ab']],
)
await self.assert_query_result(
r'''SELECT re_match_all('AB', 'AbabaB');''',
[],
)
await self.assert_query_result(
r'''SELECT re_match_all('(?i)AB', 'AbabaB');''',
[['Ab'], ['ab'], ['aB']],
)
await self.assert_query_result(
r'''SELECT re_match_all('ac', 'AbabaB');''',
[],
)
await self.assert_query_result(
r'''SELECT EXISTS re_match_all('ac', 'AbabaB');''',
[False],
)
await self.assert_query_result(
r'''SELECT NOT EXISTS re_match_all('ac', 'AbabaB');''',
[True],
)
await self.assert_query_result(
r'''SELECT EXISTS re_match_all('(?i)ab', 'AbabaB');''',
[True],
)
await self.assert_query_result(
r'''SELECT NOT EXISTS re_match_all('(?i)ab', 'AbabaB');''',
[False],
)
await self.assert_query_result(
r'''
SELECT x := re_match_all({'(?i)ab', 'a'}, 'AbabaB')
ORDER BY x;''',
[['Ab'], ['a'], ['a'], ['aB'], ['ab']],
)
await self.assert_query_result(
r'''
SELECT x := re_match_all({'(?i)ab', 'a'},
{'AbabaB', 'qwerty'})
ORDER BY x;
''',
[['Ab'], ['a'], ['a'], ['aB'], ['ab']],
)
async def test_edgeql_functions_re_match_all_02(self):
await self.assert_query_result(
r'''
WITH
MODULE schema,
C2 := ScalarType
SELECT
count(re_match_all('(\\w+)', ScalarType.name)) =
2 * count(C2);
''',
[True],
)
async def test_edgeql_functions_re_test_01(self):
await self.assert_query_result(
r'''SELECT re_test('ac', 'AbabaB');''',
[False],
)
await self.assert_query_result(
r'''SELECT NOT re_test('ac', 'AbabaB');''',
[True],
)
await self.assert_query_result(
r'''SELECT re_test(r'(?i)ab', 'AbabaB');''',
[True],
)
await self.assert_query_result(
r'''SELECT NOT re_test(r'(?i)ab', 'AbabaB');''',
[False],
)
await self.assert_query_result(
# the result always exists
r'''SELECT EXISTS re_test('(?i)ac', 'AbabaB');''',
[True],
)
await self.assert_query_result(
r'''SELECT NOT EXISTS re_test('(?i)ac', 'AbabaB');''',
[False],
)
await self.assert_query_result(
r'''SELECT x := re_test({'ab', 'a'}, 'AbabaB') ORDER BY x;''',
[True, True],
)
await self.assert_query_result(
r'''
SELECT x := re_test({'ab', 'a'}, {'AbabaB', 'qwerty'})
ORDER BY x;
''',
[False, False, True, True],
)
async def test_edgeql_functions_re_test_02(self):
await self.assert_query_result(
r'''
WITH MODULE schema
SELECT count(
ObjectType FILTER re_test(r'(\W\w)bject$', ObjectType.name)
) = 2;
''',
[True],
)
async def test_edgeql_functions_re_replace_01(self):
await self.assert_query_result(
r'''SELECT re_replace('l', 'L', 'Hello World');''',
['HeLlo World'],
)
await self.assert_query_result(
r'''SELECT re_replace('l', 'L', 'Hello World', flags := 'g');''',
['HeLLo WorLd'],
)
await self.assert_query_result(
r'''
SELECT re_replace('[a-z]', '~', 'Hello World',
flags := 'i');''',
['~ello World'],
)
await self.assert_query_result(
r'''
SELECT re_replace('[a-z]', '~', 'Hello World',
flags := 'gi');
''',
['~~~~~ ~~~~~'],
)
async def test_edgeql_functions_re_replace_02(self):
await self.assert_query_result(
r'''SELECT re_replace('[aeiou]', '~', User.name);''',
{'Elv~s', 'Y~ry'},
)
await self.assert_query_result(
r'''
SELECT re_replace('[aeiou]', '~', User.name,
flags := 'g');
''',
{'Elv~s', 'Y~ry'},
)
await self.assert_query_result(
r'''
SELECT re_replace('[aeiou]', '~', User.name,
flags := 'i');
''',
{'~lvis', 'Y~ry'},
)
await self.assert_query_result(
r'''
SELECT re_replace('[aeiou]', '~', User.name,
flags := 'gi');
''',
{'~lv~s', 'Y~ry'},
)
async def test_edgeql_functions_sum_01(self):
await self.assert_query_result(
r'''SELECT sum({1, 2, 3, -4, 5});''',
[7],
)
await self.assert_query_result(
r'''SELECT sum({0.1, 0.2, 0.3, -0.4, 0.5});''',
[0.7],
)
async def test_edgeql_functions_sum_02(self):
await self.assert_query_result(
r'''
SELECT sum({1, 2, 3, -4.2, 5});
''',
[6.8],
)
async def test_edgeql_functions_sum_03(self):
await self.assert_query_result(
r'''
SELECT sum({1.0, 2.0, 3.0, -4.2, 5});
''',
[6.8],
)
async def test_edgeql_functions_sum_04(self):
await self.assert_query_result(
r'''SELECT sum(<int16>2) IS int64;''',
[True],
)
await self.assert_query_result(
r'''SELECT sum(<int32>2) IS int64;''',
[True],
)
await self.assert_query_result(
r'''SELECT sum(<int64>2) IS int64;''',
[True],
)
await self.assert_query_result(
r'''SELECT sum(<float32>2) IS float32;''',
[True],
)
await self.assert_query_result(
r'''SELECT sum(<float64>2) IS float64;''',
[True],
)
await self.assert_query_result(
r'''SELECT sum(<decimal>2) IS decimal;''',
[True],
)
async def test_edgeql_functions_unix_to_datetime_01(self):
dt = await self.con.query_single(
'SELECT <str>to_datetime(1590595184.584);'
)
self.assertEqual('2020-05-27T15:59:44.584+00:00', dt)
async def test_edgeql_functions_unix_to_datetime_02(self):
dt = await self.con.query_single(
'SELECT <str>to_datetime(1590595184);'
)
self.assertEqual('2020-05-27T15:59:44+00:00', dt)
async def test_edgeql_functions_unix_to_datetime_03(self):
dt = await self.con.query_single(
'SELECT <str>to_datetime(517795200);'
)
self.assertEqual('1986-05-30T00:00:00+00:00', dt)
async def test_edgeql_functions_unix_to_datetime_04(self):
dt = await self.con.query_single(
'SELECT <str>to_datetime(517795200.00n);'
)
self.assertEqual('1986-05-30T00:00:00+00:00', dt)
async def test_edgeql_functions_unix_to_datetime_05(self):
with self.assertRaisesRegex(
edgedb.InvalidValueError,
"'std::datetime' value out of range"
):
await self.con.query_single(
'SELECT to_datetime(999999999999)'
)
    async def test_edgeql_functions_datetime_current_01(self):
        # make sure that datetime as a str gets serialized to a
        # particular format (ISO-8601-like: date, 'T', time with fraction)
        dt = await self.con.query_single('SELECT <str>datetime_current();')
        self.assertRegex(dt, r'\d+-\d+-\d+T\d+:\d+:\d+\.\d+.*')
async def test_edgeql_functions_datetime_current_02(self):
batch1 = await self.con.query_json(r'''
WITH MODULE schema
SELECT Type {
dt_t := datetime_of_transaction(),
dt_s := datetime_of_statement(),
dt_n := datetime_current(),
};
''')
batch2 = await self.con.query_json(r'''
# NOTE: this test assumes that there's at least 1 microsecond
# time difference between statements
WITH MODULE schema
SELECT Type {
dt_t := datetime_of_transaction(),
dt_s := datetime_of_statement(),
dt_n := datetime_current(),
};
''')
batch1 = json.loads(batch1)
batch2 = json.loads(batch2)
batches = batch1 + batch2
# all of the dt_t should be the same
set_dt_t = {t['dt_t'] for t in batches}
self.assertTrue(len(set_dt_t) == 1)
# all of the dt_s should be the same in each batch
set_dt_s1 = {t['dt_s'] for t in batch1}
set_dt_s2 = {t['dt_s'] for t in batch2}
self.assertTrue(len(set_dt_s1) == 1)
self.assertTrue(len(set_dt_s1) == 1)
# the transaction and statement datetimes should be in
# chronological order
dt_t = set_dt_t.pop()
dt_s1 = set_dt_s1.pop()
dt_s2 = set_dt_s2.pop()
self.assertTrue(dt_t <= dt_s1 < dt_s2)
# the first "now" datetime is no earlier than the statement
# for each batch
self.assertTrue(dt_s1 <= batch1[0]['dt_n'])
self.assertTrue(dt_s2 <= batch2[0]['dt_n'])
# every dt_n is already in chronological order
self.assertEqual(
[t['dt_n'] for t in batches],
sorted([t['dt_n'] for t in batches])
)
# the first dt_n is strictly earlier than the last
self.assertTrue(batches[0]['dt_n'] < batches[-1]['dt_n'])
async def test_edgeql_functions_datetime_get_01(self):
await self.assert_query_result(
r'''
SELECT datetime_get(
<datetime>'2018-05-07T15:01:22.306916-05', 'year');
''',
{2018},
)
await self.assert_query_result(
r'''
SELECT datetime_get(
<datetime>'2018-05-07T15:01:22.306916-05', 'month');
''',
{5},
)
await self.assert_query_result(
r'''
SELECT datetime_get(
<datetime>'2018-05-07T15:01:22.306916-05', 'day');
''',
{7},
)
await self.assert_query_result(
r'''
SELECT datetime_get(
<datetime>'2018-05-07T15:01:22.306916-05', 'hour');
''',
{20},
)
await self.assert_query_result(
r'''
SELECT datetime_get(
<datetime>'2018-05-07T15:01:22.306916-05', 'minutes');
''',
{1},
)
await self.assert_query_result(
r'''
SELECT datetime_get(
<datetime>'2018-05-07T15:01:22.306916-05', 'seconds');
''',
{22.306916},
)
await self.assert_query_result(
r'''
SELECT datetime_get(
<datetime>'2018-05-07T15:01:22.306916-05', 'epochseconds');
''',
{1525723282.306916},
)
async def test_edgeql_functions_datetime_get_02(self):
await self.assert_query_result(
r'''
SELECT datetime_get(
<cal::local_datetime>'2018-05-07T15:01:22.306916', 'year');
''',
{2018},
)
await self.assert_query_result(
r'''
SELECT datetime_get(
<cal::local_datetime>'2018-05-07T15:01:22.306916', 'month');
''',
{5},
)
await self.assert_query_result(
r'''
SELECT datetime_get(
<cal::local_datetime>'2018-05-07T15:01:22.306916', 'day');
''',
{7},
)
await self.assert_query_result(
r'''
SELECT datetime_get(
<cal::local_datetime>'2018-05-07T15:01:22.306916', 'hour');
''',
{15},
)
await self.assert_query_result(
r'''SELECT datetime_get(
<cal::local_datetime>'2018-05-07T15:01:22.306916', 'minutes');
''',
{1},
)
await self.assert_query_result(
r'''SELECT datetime_get(
<cal::local_datetime>'2018-05-07T15:01:22.306916', 'seconds');
''',
{22.306916},
)
async def test_edgeql_functions_datetime_get_03(self):
with self.assertRaisesRegex(
edgedb.InvalidValueError,
'invalid unit for std::datetime_get'):
await self.con.query('''
SELECT datetime_get(
<cal::local_datetime>'2018-05-07T15:01:22.306916',
'timezone_hour'
);
''')
async def test_edgeql_functions_datetime_get_04(self):
with self.assertRaisesRegex(
edgedb.InvalidValueError,
'invalid unit for std::datetime_get'):
await self.con.query('''
SELECT datetime_get(
<datetime>'2018-05-07T15:01:22.306916-05',
'timezone_hour');
''')
async def test_edgeql_functions_datetime_get_05(self):
with self.assertRaisesRegex(
edgedb.InvalidValueError,
'invalid unit for std::datetime_get'):
await self.con.execute(
r'''
SELECT <str>datetime_get(
<datetime>'2018-05-07T15:01:22.306916-05', 'epoch');
''')
async def test_edgeql_functions_date_get_01(self):
await self.assert_query_result(
r'''SELECT cal::date_get(<cal::local_date>'2018-05-07', 'year');
''',
{2018},
)
await self.assert_query_result(
r'''SELECT cal::date_get(<cal::local_date>'2018-05-07', 'month');
''',
{5},
)
await self.assert_query_result(
r'''SELECT cal::date_get(<cal::local_date>'2018-05-07', 'day');
''',
{7},
)
async def test_edgeql_functions_date_get_02(self):
with self.assertRaisesRegex(
edgedb.InvalidValueError,
'invalid unit for std::date_get'):
await self.con.execute(
r'''
SELECT <str>cal::date_get(
<cal::local_date>'2018-05-07', 'epoch');
''')
async def test_edgeql_functions_time_get_01(self):
await self.assert_query_result(
r'''SELECT
cal::time_get(<cal::local_time>'15:01:22.306916', 'hour')
''',
{15},
)
await self.assert_query_result(
r'''SELECT
cal::time_get(<cal::local_time>'15:01:22.306916', 'minutes')
''',
{1},
)
await self.assert_query_result(
r'''SELECT
cal::time_get(<cal::local_time>'15:01:22.306916', 'seconds')
''',
{22.306916},
)
await self.assert_query_result(
r'''SELECT
cal::time_get(<cal::local_time>'15:01:22.306916',
'midnightseconds')
''',
{54082.306916},
)
async def test_edgeql_functions_time_get_02(self):
with self.assertRaisesRegex(
edgedb.InvalidValueError,
'invalid unit for std::time_get'):
await self.con.execute(
r'''
SELECT <str>cal::time_get(
<cal::local_time>'15:01:22.306916', 'epoch');
''')
async def test_edgeql_functions_datetime_trunc_01(self):
await self.assert_query_result(
r'''
SELECT <str>datetime_truncate(
<datetime>'2018-05-07T15:01:22.306916-05', 'years');
''',
{'2018-01-01T00:00:00+00:00'},
)
await self.assert_query_result(
r'''
SELECT <str>datetime_truncate(
<datetime>'2018-05-07T15:01:22.306916-05', 'decades');
''',
{'2010-01-01T00:00:00+00:00'},
)
await self.assert_query_result(
r'''
SELECT <str>datetime_truncate(
<datetime>'2018-05-07T15:01:22.306916-05', 'centuries');
''',
{'2001-01-01T00:00:00+00:00'},
)
await self.assert_query_result(
r'''
SELECT <str>datetime_truncate(
<datetime>'2018-05-07T15:01:22.306916-05', 'quarters');
''',
{'2018-04-01T00:00:00+00:00'},
)
await self.assert_query_result(
r'''
SELECT <str>datetime_truncate(
<datetime>'2018-05-07T15:01:22.306916-05', 'months');
''',
{'2018-05-01T00:00:00+00:00'},
)
await self.assert_query_result(
r'''
SELECT <str>datetime_truncate(
<datetime>'2018-05-07T15:01:22.306916-05', 'weeks');
''',
{'2018-05-07T00:00:00+00:00'},
)
await self.assert_query_result(
r'''
SELECT <str>datetime_truncate(
<datetime>'2018-05-07T15:01:22.306916-05', 'days');
''',
{'2018-05-07T00:00:00+00:00'},
)
await self.assert_query_result(
r'''
SELECT <str>datetime_truncate(
<datetime>'2018-05-07T15:01:22.306916-05', 'hours');
''',
{'2018-05-07T20:00:00+00:00'},
)
await self.assert_query_result(
r'''
SELECT <str>datetime_truncate(
<datetime>'2018-05-07T15:01:22.306916-05', 'minutes');
''',
{'2018-05-07T20:01:00+00:00'},
)
await self.assert_query_result(
r'''
SELECT <str>datetime_truncate(
<datetime>'2018-05-07T15:01:22.306916-05', 'seconds');
''',
{'2018-05-07T20:01:22+00:00'},
)
    async def test_edgeql_functions_datetime_trunc_02(self):
        """A singular unit name ('second') is rejected by datetime_truncate."""
        with self.assertRaisesRegex(
                edgedb.InvalidValueError,
                'invalid unit for std::datetime_truncate'):
            await self.con.execute(
                r'''
                SELECT <str>datetime_truncate(
                    <datetime>'2018-05-07T15:01:22.306916-05', 'second');
                ''')
async def test_edgeql_functions_duration_trunc_01(self):
await self.assert_query_result(
r'''
SELECT <str>duration_truncate(
<duration>'15:01:22.306916', 'hours');
''',
{'PT15H'},
)
await self.assert_query_result(
r'''
SELECT <str>duration_truncate(
<duration>'15:01:22.306916', 'minutes');
''',
{'PT15H1M'},
)
await self.assert_query_result(
r'''
SELECT <str>duration_truncate(
<duration>'15:01:22.306916', 'seconds');
''',
{'PT15H1M22S'},
)
await self.assert_query_result(
r'''
SELECT <str>duration_truncate(
<duration>'15:01:22.306916', 'milliseconds');
''',
{'PT15H1M22.306S'},
)
# Currently no-op but may be useful if precision is improved
await self.assert_query_result(
r'''
SELECT <str>duration_truncate(
<duration>'15:01:22.306916', 'microseconds');
''',
{'PT15H1M22.306916S'},
)
    async def test_edgeql_functions_duration_trunc_02(self):
        """A singular unit name ('day') is rejected by duration_truncate."""
        with self.assertRaisesRegex(
                edgedb.InvalidValueError,
                'invalid unit for std::duration_truncate'):
            await self.con.execute(
                r'''
                SELECT <str>duration_truncate(
                    <duration>'73 hours', 'day');
                ''')
    async def test_edgeql_functions_to_datetime_01(self):
        """to_datetime from components: named and numeric zones normalize
        to the same UTC instant; an empty "fmt" string is rejected."""
        await self.assert_query_result(
            r'''
                SELECT <str>to_datetime(
                    2018, 5, 7, 15, 1, 22.306916, 'EST');
            ''',
            ['2018-05-07T20:01:22.306916+00:00'],
        )
        await self.assert_query_result(
            r'''
                SELECT <str>to_datetime(
                    2018, 5, 7, 15, 1, 22.306916, '-5');
            ''',
            ['2018-05-07T20:01:22.306916+00:00'],
        )
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    '"fmt" argument must be'):
            async with self.con.transaction():
                await self.con.query('SELECT to_datetime("2017-10-10", "")')
    async def test_edgeql_functions_to_datetime_02(self):
        """to_datetime attaches a time zone to an existing local_datetime."""
        await self.assert_query_result(
            r'''
                SELECT <str>to_datetime(
                    cal::to_local_datetime(2018, 5, 7, 15, 1, 22.306916),
                    'EST')
            ''',
            ['2018-05-07T20:01:22.306916+00:00'],
        )
    async def test_edgeql_functions_to_datetime_03(self):
        """Format-string parsing of time zones via TZH/TZM patterns.

        Double-quoted segments in the format are literal text, so "TZM"
        matches the characters T-Z-M in the input rather than minutes;
        a format that has literal "TZH" but parses only TZM is an error.
        """
        await self.assert_query_result(
            r'''
                SELECT
                    to_datetime('2019/01/01 00:00:00 0715',
                                'YYYY/MM/DD H24:MI:SS TZHTZM') =
                    <datetime>'2019-01-01T00:00:00+0715';
            ''',
            [True],
        )
        await self.assert_query_result(
            r'''
                SELECT
                    to_datetime('2019/01/01 00:00:00 07TZM',
                                'YYYY/MM/DD H24:MI:SS TZH"TZM"') =
                    <datetime>'2019-01-01T00:00:00+07';
            ''',
            [True],
        )
        await self.assert_query_result(
            r'''
                SELECT
                    to_datetime('2019/01/01 00:00:00 TZH07TZM',
                                'YYYY/MM/DD H24:MI:SS "TZH"TZH"TZM"') =
                    <datetime>'2019-01-01T00:00:00+07';
            ''',
            [True],
        )
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    'missing required time zone in format'):
            async with self.con.transaction():
                await self.con.query(r'''
                    SELECT
                        to_datetime('2019/01/01 00:00:00 TZH07',
                                    'YYYY/MM/DD H24:MI:SS "TZH"TZM') =
                        <datetime>'2019-01-01T00:00:00+07';
                ''')
    async def test_edgeql_functions_to_datetime_04(self):
        """A format whose zone digits are consumed by a literal "NOPE"
        prefix mismatch yields a missing-time-zone error."""
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    'missing required time zone in input'):
            async with self.con.transaction():
                await self.con.query(r'''
                    SELECT
                        to_datetime('2019/01/01 00:00:00 0715',
                                    'YYYY/MM/DD H24:MI:SS "NOPE"TZHTZM');
                ''')
    async def test_edgeql_functions_to_datetime_05(self):
        """A single-string to_datetime without a time zone is invalid."""
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    'invalid input syntax'):
            async with self.con.transaction():
                # omitting time zone
                await self.con.query(r'''
                    SELECT
                        to_datetime('2019/01/01 00:00:00');
                ''')
    async def test_edgeql_functions_to_datetime_06(self):
        """Years 10000, 0 and -1 are all out of range for to_datetime."""
        async with self.assertRaisesRegexTx(
            edgedb.InvalidValueError,
            'value out of range',
        ):
            await self.con.query(r'''
                SELECT to_datetime(10000, 1, 1, 1, 1, 1, 'UTC');
            ''')
        async with self.assertRaisesRegexTx(
            edgedb.InvalidValueError,
            'value out of range',
        ):
            await self.con.query(r'''
                SELECT to_datetime(0, 1, 1, 1, 1, 1, 'UTC');
            ''')
        async with self.assertRaisesRegexTx(
            edgedb.InvalidValueError,
            'value out of range',
        ):
            await self.con.query(r'''
                SELECT to_datetime(-1, 1, 1, 1, 1, 1, 'UTC');
            ''')
    async def test_edgeql_functions_to_local_datetime_01(self):
        """Convert a UTC datetime to US/Pacific wall-clock time."""
        await self.assert_query_result(
            r'''
                SELECT <str>cal::to_local_datetime(
                    <datetime>'2018-05-07T20:01:22.306916+00:00',
                    'US/Pacific');
            ''',
            ['2018-05-07T13:01:22.306916'],
        )
    async def test_edgeql_functions_to_local_datetime_02(self):
        """Build a local_datetime from numeric components."""
        await self.assert_query_result(
            r'''
                SELECT <str>cal::to_local_datetime(2018, 5, 7, 15, 1, 22.306916);
            ''',
            ['2018-05-07T15:01:22.306916'],
        )
    async def test_edgeql_functions_to_local_datetime_03(self):
        """Trailing zone digits in the input are ignored when the format
        does not parse them."""
        await self.assert_query_result(
            # The time zone is ignored because the format string just
            # specifies arbitrary characters in its place.
            r'''
                SELECT
                    cal::to_local_datetime('2019/01/01 00:00:00 0715',
                                           'YYYY/MM/DD H24:MI:SS "NOTZ"') =
                    <cal::local_datetime>'2019-01-01T00:00:00';
            ''',
            [True],
        )
        await self.assert_query_result(
            # The time zone is ignored because the format string does
            # not expect to parse it.
            r'''
                SELECT
                    cal::to_local_datetime('2019/01/01 00:00:00 0715',
                                           'YYYY/MM/DD H24:MI:SS') =
                    <cal::local_datetime>'2019-01-01T00:00:00';
            ''',
            [True],
        )
    async def test_edgeql_functions_to_local_datetime_04(self):
        """A TZH pattern in the format is an error for local_datetime."""
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    'unexpected time zone in format'):
            async with self.con.transaction():
                await self.con.query(
                    r'''
                    SELECT
                        cal::to_local_datetime('2019/01/01 00:00:00 0715',
                                               'YYYY/MM/DD H24:MI:SS TZH') =
                        <cal::local_datetime>'2019-01-01T00:00:00';
                    ''')
    async def test_edgeql_functions_to_local_datetime_05(self):
        """Session time zone state must not leak out of the conversion."""
        await self.assert_query_result(
            # Make sure that time zone change (while converting
            # `to_local_datetime`) is not leaking.
            r'''
                SELECT (<str><cal::local_datetime>'2019-01-01 00:00:00',
                        <str>cal::to_local_datetime('2019/01/01 00:00:00 0715',
                                                    'YYYY/MM/DD H24:MI:SS'),
                        <str><cal::local_datetime>'2019-02-01 00:00:00');
            ''',
            [['2019-01-01T00:00:00',
              '2019-01-01T00:00:00',
              '2019-02-01T00:00:00']],
        )
    async def test_edgeql_functions_to_local_datetime_06(self):
        """A single-string input that includes a time zone is invalid."""
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    'invalid input syntax'):
            async with self.con.transaction():
                # including time zone
                await self.con.query(r'''
                    SELECT
                        cal::to_local_datetime('2019/01/01 00:00:00 0715');
                ''')
    async def test_edgeql_functions_to_local_datetime_07(self):
        """Years 10000, 0 and -1 are out of range for to_local_datetime."""
        async with self.assertRaisesRegexTx(
            edgedb.InvalidValueError,
            'value out of range',
        ):
            await self.con.query(r'''
                SELECT cal::to_local_datetime(10000, 1, 1, 1, 1, 1);
            ''')
        async with self.assertRaisesRegexTx(
            edgedb.InvalidValueError,
            'value out of range',
        ):
            await self.con.query(r'''
                SELECT cal::to_local_datetime(0, 1, 1, 1, 1, 1);
            ''')
        async with self.assertRaisesRegexTx(
            edgedb.InvalidValueError,
            'value out of range',
        ):
            await self.con.query(r'''
                SELECT cal::to_local_datetime(-1, 1, 1, 1, 1, 1);
            ''')
    async def test_edgeql_functions_to_local_date_01(self):
        """Build a local_date from components; empty "fmt" is rejected."""
        await self.assert_query_result(
            r'''
                SELECT <str>cal::to_local_date(2018, 5, 7);
            ''',
            ['2018-05-07'],
        )
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    '"fmt" argument must be'):
            async with self.con.transaction():
                await self.con.query(
                    'SELECT cal::to_local_date("2017-10-10", "")')
    async def test_edgeql_functions_to_local_date_02(self):
        """Project a UTC datetime onto a date in the US/Pacific zone."""
        await self.assert_query_result(
            r'''
                SELECT <str>cal::to_local_date(
                    <datetime>'2018-05-07T20:01:22.306916+00:00',
                    'US/Pacific');
            ''',
            ['2018-05-07'],
        )
    async def test_edgeql_functions_to_local_date_03(self):
        """A TZH pattern in the format is an error for local_date."""
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    'unexpected time zone in format'):
            async with self.con.transaction():
                await self.con.query(
                    r'''
                    SELECT
                        cal::to_local_date('2019/01/01 00:00:00 0715',
                                           'YYYY/MM/DD H24:MI:SS TZH') =
                        <cal::local_date>'2019-01-01';
                    ''')
    async def test_edgeql_functions_to_local_date_04(self):
        """A single-string input with time and zone is invalid for a date."""
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    'invalid input syntax'):
            async with self.con.transaction():
                # including too much
                await self.con.query(r'''
                    SELECT
                        cal::to_local_date('2019/01/01 00:00:00 0715');
                ''')
    async def test_edgeql_functions_to_local_date_05(self):
        """Years 10000, 0 and -1 are out of range for to_local_date."""
        async with self.assertRaisesRegexTx(
            edgedb.InvalidValueError,
            'value out of range',
        ):
            await self.con.query(r'''
                SELECT cal::to_local_date(10000, 1, 1);
            ''')
        async with self.assertRaisesRegexTx(
            edgedb.InvalidValueError,
            'value out of range',
        ):
            await self.con.query(r'''
                SELECT cal::to_local_date(0, 1, 1);
            ''')
        async with self.assertRaisesRegexTx(
            edgedb.InvalidValueError,
            'value out of range',
        ):
            await self.con.query(r'''
                SELECT cal::to_local_date(-1, 1, 1);
            ''')
    async def test_edgeql_functions_to_local_time_01(self):
        """Build a local_time from components; empty "fmt" is rejected."""
        await self.assert_query_result(
            r'''
                SELECT <str>cal::to_local_time(15, 1, 22.306916);
            ''',
            ['15:01:22.306916'],
        )
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    '"fmt" argument must be'):
            async with self.con.transaction():
                await self.con.query(
                    'SELECT cal::to_local_time("12:00:00", "")')
    async def test_edgeql_functions_to_local_time_02(self):
        """Project a UTC datetime onto US/Pacific wall-clock time."""
        await self.assert_query_result(
            r'''
                SELECT <str>cal::to_local_time(
                    <datetime>'2018-05-07T20:01:22.306916+00:00',
                    'US/Pacific');
            ''',
            ['13:01:22.306916'],
        )
    async def test_edgeql_functions_to_local_time_03(self):
        """A TZH pattern in the format is an error for local_time."""
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    'unexpected time zone in format'):
            async with self.con.transaction():
                await self.con.query(
                    r'''
                    SELECT
                        cal::to_local_time('00:00:00 0715',
                                           'H24:MI:SS TZH') =
                        <cal::local_time>'00:00:00';
                    ''')
async def test_edgeql_functions_to_local_time_04(self):
with self.assertRaisesRegex(edgedb.InvalidValueError,
'invalid input syntax'):
async with self.con.transaction():
# including time zone
await self.con.query(r'''
SELECT
cal::to_local_datetime('00:00:00 0715');
''')
async def test_edgeql_functions_to_duration_01(self):
await self.assert_query_result(
r'''SELECT <str>to_duration(hours:=20);''',
['PT20H'],
)
await self.assert_query_result(
r'''SELECT <str>to_duration(minutes:=20);''',
['PT20M'],
)
await self.assert_query_result(
r'''SELECT <str>to_duration(seconds:=20);''',
['PT20S'],
)
await self.assert_query_result(
r'''SELECT <str>to_duration(seconds:=20.15);''',
['PT20.15S'],
)
await self.assert_query_result(
r'''SELECT <str>to_duration(microseconds:=100);''',
['PT0.0001S'],
)
    async def test_edgeql_functions_to_duration_02(self):
        """Durations built from different units compare correctly."""
        await self.assert_query_result(
            r'''SELECT to_duration(hours:=20) > to_duration(minutes:=20);''',
            [True],
        )
        await self.assert_query_result(
            r'''SELECT to_duration(minutes:=20) > to_duration(seconds:=20);''',
            [True],
        )
    async def test_edgeql_functions_duration_to_seconds(self):
        """duration_to_seconds converts including fractional seconds."""
        await self.assert_query_result(
            r'''SELECT duration_to_seconds(<duration>'20 hours');''',
            [72000.0],
        )
        await self.assert_query_result(
            r'''SELECT duration_to_seconds(<duration>'1:02:03.000123');''',
            [3723.000123],
        )
    async def test_edgeql_functions_duration_to_seconds_exact(self):
        """Exactness check at a magnitude where a float epoch would lose
        microsecond precision."""
        # at this value extract(epoch from duration) is imprecise
        await self.assert_query_result(
            r'''SELECT duration_to_seconds(
                <duration>'1801439850 seconds 123456 microseconds');''',
            [1801439850.123456],
        )
    async def test_edgeql_functions_to_str_01(self):
        """For simple scalars the <str> cast and format-less to_str()
        agree; an empty format string is always an error."""
        # at the very least the cast <str> should be equivalent to
        # a call to to_str() without explicit format for simple scalars
        await self.assert_query_result(
            r'''
                WITH DT := datetime_current()
                # FIXME: the cast has a "T" and the str doesn't for some reason
                SELECT <str>DT = to_str(DT);
            ''',
            [True],
        )
        await self.assert_query_result(
            r'''
                WITH D := cal::to_local_date(datetime_current(), 'UTC')
                SELECT <str>D = to_str(D);
            ''',
            [True],
        )
        await self.assert_query_result(
            r'''
                WITH NT := cal::to_local_time(datetime_current(), 'UTC')
                SELECT <str>NT = to_str(NT);
            ''',
            [True],
        )
        await self.assert_query_result(
            r'''SELECT <str>123 = to_str(123);''',
            [True],
        )
        await self.assert_query_result(
            r'''SELECT <str>123.456 = to_str(123.456);''',
            [True],
        )
        await self.assert_query_result(
            r'''SELECT <str>123.456e-20 = to_str(123.456e-20);''',
            [True],
        )
        await self.assert_query_result(
            r'''
            SELECT <str><decimal>'123456789012345678901234567890.1234567890' =
                to_str(123456789012345678901234567890.1234567890n);
            ''',
            [True],
        )
        # Empty format string shouldn't produce an empty set.
        #
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    '"fmt" argument must be'):
            async with self.con.transaction():
                await self.con.query(r'''SELECT to_str(1, "")''')
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    '"fmt" argument must be'):
            async with self.con.transaction():
                await self.con.query(r'''SELECT to_str(1.1, "")''')
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    '"fmt" argument must be'):
            async with self.con.transaction():
                await self.con.query(r'''SELECT to_str(1.1n, "")''')
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    '"fmt" argument must be'):
            async with self.con.transaction():
                await self.con.query(
                    r'''SELECT to_str(to_json('{}'), "")''')
    async def test_edgeql_functions_to_str_02(self):
        """Datetime to_str with various date format masks, including
        literal text, ordinal suffixes and fill-mode (FM) modifiers."""
        await self.assert_query_result(
            r'''
                WITH DT := <datetime>'2018-05-07 15:01:22.306916-05'
                SELECT to_str(DT, 'YYYY-MM-DD');
            ''',
            {'2018-05-07'},
        )
        await self.assert_query_result(
            r'''
                WITH DT := <datetime>'2018-05-07 15:01:22.306916-05'
                SELECT to_str(DT, 'YYYYBC');
            ''',
            {'2018AD'},
        )
        await self.assert_query_result(
            r'''
                WITH DT := <datetime>'2018-05-07 15:01:22.306916-05'
                SELECT to_str(DT, 'FMDDth of FMMonth, YYYY');
            ''',
            {'7th of May, 2018'},
        )
        await self.assert_query_result(
            r'''
                WITH DT := <datetime>'2018-05-07 15:01:22.306916-05'
                SELECT to_str(DT, 'CCth "century"');
            ''',
            {'21st century'},
        )
        await self.assert_query_result(
            r'''
                WITH DT := <datetime>'2018-05-07 15:01:22.306916-05'
                SELECT to_str(DT, 'Y,YYY Month DD Day');
            ''',
            {'2,018 May 07 Monday '},
        )
        await self.assert_query_result(
            r'''
                WITH DT := <datetime>'2018-05-07 15:01:22.306916-05'
                SELECT to_str(DT, 'foo');
            ''',
            {'foo'},
        )
        await self.assert_query_result(
            r'''
                WITH DT := <datetime>'2018-05-07 15:01:22.306916-05'
                SELECT to_str(DT, ' ');
            ''',
            {' '}
        )
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    '"fmt" argument must be'):
            async with self.con.transaction():
                await self.con.query(r'''
                    WITH DT := <datetime>'2018-05-07 15:01:22.306916-05'
                    SELECT to_str(DT, '');
                ''')
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    '"fmt" argument must be'):
            async with self.con.transaction():
                await self.con.query(r'''
                    WITH DT := to_duration(hours:=20)
                    SELECT to_str(DT, '');
                ''')
    async def test_edgeql_functions_to_str_03(self):
        """12-hour time formatting of a datetime (rendered in UTC)."""
        await self.assert_query_result(
            r'''
                WITH DT := <datetime>'2018-05-07 15:01:22.306916-05'
                SELECT to_str(DT, 'HH:MI A.M.');
            ''',
            # tests run in UTC time-zone, so 15:01-05 is 20:01 UTC
            {'08:01 P.M.'},
        )
    async def test_edgeql_functions_to_str_04(self):
        """local_date to_str mirrors the datetime format-mask behavior."""
        await self.assert_query_result(
            r'''
                WITH DT := <cal::local_date>'2018-05-07'
                SELECT to_str(DT, 'YYYY-MM-DD');
            ''',
            {'2018-05-07'},
        )
        await self.assert_query_result(
            r'''
                WITH DT := <cal::local_date>'2018-05-07'
                SELECT to_str(DT, 'YYYYBC');
            ''',
            {'2018AD'},
        )
        await self.assert_query_result(
            r'''
                WITH DT := <cal::local_date>'2018-05-07'
                SELECT to_str(DT, 'FMDDth of FMMonth, YYYY');
            ''',
            {'7th of May, 2018'},
        )
        await self.assert_query_result(
            r'''
                WITH DT := <cal::local_date>'2018-05-07'
                SELECT to_str(DT, 'CCth "century"');
            ''',
            {'21st century'},
        )
        await self.assert_query_result(
            r'''
                WITH DT := <cal::local_date>'2018-05-07'
                SELECT to_str(DT, 'Y,YYY Month DD Day');
            ''',
            {'2,018 May 07 Monday '},
        )
        await self.assert_query_result(
            r'''
                # the format string doesn't have any special characters
                WITH DT := <cal::local_date>'2018-05-07'
                SELECT to_str(DT, 'foo');
            ''',
            {'foo'},
        )
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    '"fmt" argument must be'):
            async with self.con.transaction():
                await self.con.query(r'''
                    WITH DT := <cal::local_time>'12:00:00'
                    SELECT to_str(DT, '');
                ''')
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    '"fmt" argument must be'):
            async with self.con.transaction():
                await self.con.query(r'''
                    WITH DT := <cal::local_date>'2018-05-07'
                    SELECT to_str(DT, '');
                ''')
async def test_edgeql_functions_to_str_05(self):
await self.assert_query_result(
r'''SELECT to_str(123456789, '99');''',
{' ##'}, # the number is too long for the desired representation
)
await self.assert_query_result(
r'''SELECT to_str(123456789, '999999999');''',
{' 123456789'},
)
await self.assert_query_result(
r'''SELECT to_str(123456789, '999,999,999');''',
{' 123,456,789'},
)
await self.assert_query_result(
r'''SELECT to_str(123456789, '999,999,999,999');''',
{' 123,456,789'},
)
await self.assert_query_result(
r'''SELECT to_str(123456789, 'FM999,999,999,999');''',
{'123,456,789'},
)
await self.assert_query_result(
r'''SELECT to_str(123456789, 'S999,999,999,999');''',
{' +123,456,789'},
)
await self.assert_query_result(
r'''SELECT to_str(123456789, 'SG999,999,999,999');''',
{'+ 123,456,789'},
)
await self.assert_query_result(
r'''SELECT to_str(123456789, 'S099,999,999,999');''',
{'+000,123,456,789'},
)
await self.assert_query_result(
r'''SELECT to_str(123456789, 'SG099,999,999,999');''',
{'+000,123,456,789'},
)
await self.assert_query_result(
r'''SELECT to_str(123456789, 'S099999999999');''',
{'+000123456789'},
)
await self.assert_query_result(
r'''SELECT to_str(123456789, 'S990999999999');''',
{' +0123456789'},
)
await self.assert_query_result(
r'''SELECT to_str(123456789, 'FMS990999999999');''',
{'+0123456789'},
)
await self.assert_query_result(
r'''SELECT to_str(-123456789, '999999999PR');''',
{'<123456789>'},
)
await self.assert_query_result(
r'''SELECT to_str(987654321, 'FM999999999th');''',
{'987654321st'},
)
with self.assertRaisesRegex(edgedb.InvalidValueError,
'"fmt" argument must be'):
async with self.con.transaction():
await self.con.query(r'''SELECT to_str(987654321, '');''',)
async def test_edgeql_functions_to_str_06(self):
await self.assert_query_result(
r'''SELECT to_str(123.456789, '99');''',
{' ##'}, # the integer part of the number is too long
)
await self.assert_query_result(
r'''SELECT to_str(123.456789, '999');''',
{' 123'},
)
await self.assert_query_result(
r'''SELECT to_str(123.456789, '999.999');''',
{' 123.457'},
)
await self.assert_query_result(
r'''SELECT to_str(123.456789, '999.999999999');''',
{' 123.456789000'},
)
await self.assert_query_result(
r'''SELECT to_str(123.456789, 'FM999.999999999');''',
{'123.456789'},
)
await self.assert_query_result(
r'''SELECT to_str(123.456789e-20, '999.999999999');''',
{' .000000000'},
)
await self.assert_query_result(
r'''SELECT to_str(123.456789e-20, 'FM999.999999999');''',
{'0.'},
)
await self.assert_query_result(
r'''SELECT to_str(123.456789e-20, '099.999999990');''',
{' 000.000000000'},
)
await self.assert_query_result(
r'''SELECT to_str(123.456789e-20, 'FM990.099999999');''',
{'0.0'},
)
await self.assert_query_result(
r'''SELECT to_str(123.456789e-20, '0.0999EEEE');''',
{' 1.2346e-18'},
)
await self.assert_query_result(
r'''SELECT to_str(123.456789e20, '0.0999EEEE');''',
{' 1.2346e+22'},
)
with self.assertRaisesRegex(edgedb.InvalidValueError,
'"fmt" argument must be'):
async with self.con.transaction():
await self.con.query(
r'''SELECT to_str(123.456789e20, '');''')
async def test_edgeql_functions_to_str_07(self):
await self.assert_query_result(
r'''SELECT to_str(<cal::local_time>'15:01:22', 'HH:MI A.M.');''',
{'03:01 P.M.'},
)
await self.assert_query_result(
r'''SELECT to_str(<cal::local_time>'15:01:22', 'HH:MI:SSam.');''',
{'03:01:22pm.'},
)
await self.assert_query_result(
r'''SELECT to_str(<cal::local_time>'15:01:22', 'HH24:MI');''',
{'15:01'},
)
await self.assert_query_result(
r'''SELECT to_str(<cal::local_time>'15:01:22', ' ');''',
{' '},
)
with self.assertRaisesRegex(edgedb.InvalidValueError,
'"fmt" argument must be'):
async with self.con.transaction():
await self.con.query(
r'''SELECT to_str(<cal::local_time>'15:01:22', '');''',)
async def test_edgeql_functions_array_join_01(self):
await self.assert_query_result(
r'''SELECT array_join(['one', 'two', 'three'], ', ');''',
['one, two, three'],
)
await self.assert_query_result(
r'''SELECT array_join(['one', 'two', 'three'], '');''',
['onetwothree'],
)
await self.assert_query_result(
r'''SELECT array_join(<array<str>>[], ', ');''',
[''],
)
async def test_edgeql_functions_str_split_01(self):
await self.assert_query_result(
r'''SELECT str_split('one, two, three', ', ');''',
[['one', 'two', 'three']],
)
await self.assert_query_result(
r'''SELECT str_split('', ', ');''',
[[]],
)
await self.assert_query_result(
r'''SELECT str_split('foo', ', ');''',
[['foo']],
)
await self.assert_query_result(
r'''SELECT str_split('foo', '');''',
[['f', 'o', 'o']],
)
async def test_edgeql_functions_to_int_01(self):
await self.assert_query_result(
r'''SELECT to_int64(' 123456789', '999999999');''',
{123456789},
)
await self.assert_query_result(
r'''SELECT to_int64(' 123,456,789', '999,999,999');''',
{123456789},
)
await self.assert_query_result(
r'''SELECT to_int64(' 123,456,789', '999,999,999,999');''',
{123456789},
)
await self.assert_query_result(
r'''SELECT to_int64('123,456,789', 'FM999,999,999,999');''',
{123456789},
)
await self.assert_query_result(
r'''SELECT to_int64(' +123,456,789', 'S999,999,999,999');''',
{123456789},
)
await self.assert_query_result(
r'''SELECT to_int64('+ 123,456,789', 'SG999,999,999,999');''',
{123456789},
)
await self.assert_query_result(
r'''SELECT to_int64('+000,123,456,789', 'S099,999,999,999');''',
{123456789},
)
await self.assert_query_result(
r'''SELECT to_int64('+000,123,456,789', 'SG099,999,999,999');''',
{123456789},
)
await self.assert_query_result(
r'''SELECT to_int64('+000123456789', 'S099999999999');''',
{123456789},
)
await self.assert_query_result(
r'''SELECT to_int64(' +0123456789', 'S990999999999');''',
{123456789},
)
await self.assert_query_result(
r'''SELECT to_int64('+0123456789', 'FMS990999999999');''',
{123456789},
)
await self.assert_query_result(
r'''SELECT to_int64('<123456789>', '999999999PR');''',
{-123456789},
)
await self.assert_query_result(
r'''SELECT to_int64('987654321st', 'FM999999999th');''',
{987654321},
)
await self.assert_query_result(
r'''SELECT to_int64('987654321st', <str>$0);''',
{987654321},
variables=('FM999999999th',),
)
with self.assertRaisesRegex(edgedb.InvalidValueError,
'"fmt" argument must be'):
async with self.con.transaction():
await self.con.query('''SELECT to_int64('1', '')''')
async def test_edgeql_functions_to_int_02(self):
await self.assert_query_result(
r'''SELECT to_int32(' 123456789', '999999999');''',
{123456789},
)
await self.assert_query_result(
r'''SELECT to_int32(' 123,456,789', '999,999,999');''',
{123456789},
)
await self.assert_query_result(
r'''SELECT to_int32(' 123,456,789', '999,999,999,999');''',
{123456789},
)
await self.assert_query_result(
r'''SELECT to_int32('123,456,789', 'FM999,999,999,999');''',
{123456789},
)
await self.assert_query_result(
r'''SELECT to_int32(' +123,456,789', 'S999,999,999,999');''',
{123456789},
)
await self.assert_query_result(
r'''SELECT to_int32('+ 123,456,789', 'SG999,999,999,999');''',
{123456789},
)
await self.assert_query_result(
r'''SELECT to_int32('+000,123,456,789', 'S099,999,999,999');''',
{123456789},
)
await self.assert_query_result(
r'''SELECT to_int32('+000,123,456,789', 'SG099,999,999,999');''',
{123456789},
)
await self.assert_query_result(
r'''SELECT to_int32('+000123456789', 'S099999999999');''',
{123456789},
)
await self.assert_query_result(
r'''SELECT to_int32(' +0123456789', 'S990999999999');''',
{123456789},
)
await self.assert_query_result(
r'''SELECT to_int32('+0123456789', 'FMS990999999999');''',
{123456789},
)
await self.assert_query_result(
r'''SELECT to_int32('<123456789>', '999999999PR');''',
{-123456789},
)
await self.assert_query_result(
r'''SELECT to_int32('987654321st', 'FM999999999th');''',
{987654321},
)
await self.assert_query_result(
r'''SELECT to_int32('987654321st', <str>$0);''',
{987654321},
variables=('FM999999999th',),
)
with self.assertRaisesRegex(edgedb.InvalidValueError,
'"fmt" argument must be'):
async with self.con.transaction():
await self.con.query('''SELECT to_int32('1', '')''')
async def test_edgeql_functions_to_int_03(self):
await self.assert_query_result(
r'''SELECT to_int16('12345', '999999999');''',
{12345},
)
await self.assert_query_result(
r'''SELECT to_int16('12,345', '999,999,999');''',
{12345},
)
await self.assert_query_result(
r'''SELECT to_int16(' 12,345', '999,999,999,999');''',
{12345},
)
await self.assert_query_result(
r'''SELECT to_int16('12,345', 'FM999,999,999,999');''',
{12345},
)
await self.assert_query_result(
r'''SELECT to_int16('+12,345', 'S999,999,999,999');''',
{12345},
)
await self.assert_query_result(
r'''SELECT to_int16('+ 12,345', 'SG999,999,999,999');''',
{12345},
)
await self.assert_query_result(
r'''SELECT to_int16('-000,012,345', 'S099,999,999,999');''',
{-12345},
)
await self.assert_query_result(
r'''SELECT to_int16('+000,012,345', 'SG099,999,999,999');''',
{12345},
)
await self.assert_query_result(
r'''SELECT to_int16('+00012345', 'S099999999999');''',
{12345},
)
await self.assert_query_result(
r'''SELECT to_int16(' +012345', 'S990999999999');''',
{12345},
)
await self.assert_query_result(
r'''SELECT to_int16('+012345', 'FMS990999999999');''',
{12345},
)
await self.assert_query_result(
r'''SELECT to_int16('<12345>', '999999999PR');''',
{-12345},
)
await self.assert_query_result(
r'''SELECT to_int16('4321st', 'FM999999999th');''',
{4321},
)
await self.assert_query_result(
r'''SELECT to_int16('4321st', <str>$0);''',
{4321},
variables=('FM999999999th',),
)
with self.assertRaisesRegex(edgedb.InvalidValueError,
'"fmt" argument must be'):
async with self.con.transaction():
await self.con.query('''SELECT to_int16('1', '')''')
async def test_edgeql_functions_to_float_01(self):
await self.assert_query_result(
r'''SELECT to_float64(' 123', '999');''',
{123},
)
await self.assert_query_result(
r'''SELECT to_float64('123.457', '999.999');''',
{123.457},
)
await self.assert_query_result(
r'''SELECT to_float64(' 123.456789000', '999.999999999');''',
{123.456789},
)
await self.assert_query_result(
r'''SELECT to_float64('123.456789', 'FM999.999999999');''',
{123.456789},
)
await self.assert_query_result(
r'''SELECT to_float64('123.456789', <str>$0);''',
{123.456789},
variables=('FM999.999999999',)
)
with self.assertRaisesRegex(edgedb.InvalidValueError,
'"fmt" argument must be'):
async with self.con.transaction():
await self.con.query('''SELECT to_float64('1', '')''')
async def test_edgeql_functions_to_float_02(self):
await self.assert_query_result(
r'''SELECT to_float32(' 123', '999');''',
{123},
)
await self.assert_query_result(
r'''SELECT to_float32('123.457', '999.999');''',
{123.457},
)
await self.assert_query_result(
r'''SELECT to_float32(' 123.456789000', '999.999999999');''',
{123.457},
)
await self.assert_query_result(
r'''SELECT to_float32('123.456789', 'FM999.999999999');''',
{123.457},
)
with self.assertRaisesRegex(edgedb.InvalidValueError,
'"fmt" argument must be'):
async with self.con.transaction():
await self.con.query('''SELECT to_float32('1', '')''')
    async def test_edgeql_functions_to_bigint_01(self):
        """to_bigint parses a formatted integer; empty "fmt" is an error."""
        await self.assert_query_result(
            r'''SELECT to_bigint(' 123', '999');''',
            {123},
        )
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    '"fmt" argument must be'):
            async with self.con.transaction():
                await self.con.query('''SELECT to_bigint('1', '')''')
    async def test_edgeql_functions_to_bigint_02(self):
        """to_bigint rejects a fractional literal."""
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    'invalid syntax'):
            async with self.con.transaction():
                await self.con.query('''SELECT to_bigint('1.02')''')
    async def test_edgeql_functions_to_decimal_01(self):
        """to_decimal parses formatted numbers; the binary protocol
        returns exact Decimal values, the JSON protocol floats."""
        await self.assert_query_result(
            r'''SELECT to_decimal(' 123', '999');''',
            {123},
        )
        await self.assert_query_result(
            r'''SELECT to_decimal('123.457', '999.999');''',
            exp_result_json={123.457},
            exp_result_binary={decimal.Decimal('123.457')},
        )
        await self.assert_query_result(
            r'''SELECT to_decimal(' 123.456789000', '999.999999999');''',
            exp_result_json={123.456789},
            exp_result_binary={decimal.Decimal('123.456789')},
        )
        await self.assert_query_result(
            r'''SELECT to_decimal('123.456789', 'FM999.999999999');''',
            exp_result_json={123.456789},
            exp_result_binary={decimal.Decimal('123.456789')},
        )
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    '"fmt" argument must be'):
            async with self.con.transaction():
                await self.con.query('''SELECT to_decimal('1', '')''')
    async def test_edgeql_functions_to_decimal_02(self):
        """to_decimal preserves 27+27 digits that would not fit a float;
        only the binary-protocol Decimal expectation is exact."""
        await self.assert_query_result(
            r'''
                SELECT to_decimal(
                    '123456789123456789123456789.123456789123456789123456789',
                    'FM999999999999999999999999999.999999999999999999999999999');
            ''',
            exp_result_json={
                123456789123456789123456789.123456789123456789123456789},
            exp_result_binary={decimal.Decimal(
                '123456789123456789123456789.123456789123456789123456789')},
        )
    async def test_edgeql_functions_len_01(self):
        """len() on str values; the last case deliberately uses the
        `__std__::` module alias to reach the same function."""
        await self.assert_query_result(
            r'''SELECT len('');''',
            [0],
        )
        await self.assert_query_result(
            r'''SELECT len('hello');''',
            [5],
        )
        await self.assert_query_result(
            r'''SELECT __std__::len({'hello', 'world'});''',
            [5, 5]
        )
    async def test_edgeql_functions_len_02(self):
        """len() on bytes values, including element-wise over a set."""
        await self.assert_query_result(
            r'''SELECT len(b'');''',
            [0],
        )
        await self.assert_query_result(
            r'''SELECT len(b'hello');''',
            [5],
        )
        await self.assert_query_result(
            r'''SELECT len({b'hello', b'world'});''',
            [5, 5]
        )
async def test_edgeql_functions_len_03(self):
await self.assert_query_result(
r'''SELECT len(<array<str>>[]);''',
[0],
)
await self.assert_query_result(
r'''SELECT len([]);''',
[0],
)
await self.assert_query_result(
r'''SELECT len(['hello']);''',
[1],
)
await self.assert_query_result(
r'''SELECT len(['hello', 'world']);''',
[2],
)
await self.assert_query_result(
r'''SELECT len([1, 2, 3, 4, 5]);''',
[5],
)
await self.assert_query_result(
r'''SELECT len({['hello'], ['hello', 'world']});''',
{1, 2},
)
    @test.xfail(
        "Known collation issue on Heroku Postgres",
        unless=os.getenv("EDGEDB_TEST_BACKEND_VENDOR") != "heroku-postgres"
    )
    async def test_edgeql_functions_min_01(self):
        """min() over empty sets, numbers, strings, arrays and all the
        date/time scalar types (string ordering is collation-dependent,
        hence the Heroku xfail above)."""
        await self.assert_query_result(
            r'''SELECT min(<int64>{});''',
            [],
        )
        await self.assert_query_result(
            r'''SELECT min(4);''',
            [4],
        )
        await self.assert_query_result(
            r'''SELECT min({10, 20, -3, 4});''',
            [-3],
        )
        await self.assert_query_result(
            r'''SELECT min({10, 2.5, -3.1, 4});''',
            [-3.1],
        )
        await self.assert_query_result(
            r'''SELECT min({'10', '20', '-3', '4'});''',
            ['-3'],
        )
        await self.assert_query_result(
            r'''SELECT min({'10', 'hello', 'world', '-3', '4'});''',
            ['-3'],
        )
        await self.assert_query_result(
            r'''SELECT min({'hello', 'world'});''',
            ['hello'],
        )
        await self.assert_query_result(
            r'''SELECT min({[1, 2], [3, 4]});''',
            [[1, 2]],
        )
        await self.assert_query_result(
            r'''SELECT min({[1, 2], [3, 4], <array<int64>>[]});''',
            [[]],
        )
        await self.assert_query_result(
            r'''SELECT min({[1, 2], [1, 0.4]});''',
            [[1, 0.4]],
        )
        await self.assert_query_result(
            r'''
                SELECT <str>min(<datetime>{
                    '2018-05-07T15:01:22.306916-05',
                    '2017-05-07T16:01:22.306916-05',
                    '2017-01-07T11:01:22.306916-05',
                    '2018-01-07T11:12:22.306916-05',
                });
            ''',
            ['2017-01-07T16:01:22.306916+00:00'],
        )
        await self.assert_query_result(
            r'''
                SELECT <str>min(<cal::local_datetime>{
                    '2018-05-07T15:01:22.306916',
                    '2017-05-07T16:01:22.306916',
                    '2017-01-07T11:01:22.306916',
                    '2018-01-07T11:12:22.306916',
                });
            ''',
            ['2017-01-07T11:01:22.306916'],
        )
        await self.assert_query_result(
            r'''
                SELECT <str>min(<cal::local_date>{
                    '2018-05-07',
                    '2017-05-07',
                    '2017-01-07',
                    '2018-01-07',
                });
            ''',
            ['2017-01-07'],
        )
        await self.assert_query_result(
            r'''
                SELECT <str>min(<cal::local_time>{
                    '15:01:22',
                    '16:01:22',
                    '11:01:22',
                    '11:12:22',
                });
            ''',
            ['11:01:22'],
        )
        await self.assert_query_result(
            r'''
                SELECT <str>min(<duration>{
                    '15:01:22',
                    '16:01:22',
                    '11:01:22',
                    '11:12:22',
                });
            ''',
            ['PT11H1M22S'],
        )
    async def test_edgeql_functions_min_02(self):
        """min() over object properties from the test schema."""
        await self.assert_query_result(
            r'''
                SELECT min(User.name);
            ''',
            ['Elvis'],
        )
        await self.assert_query_result(
            r'''
                SELECT min(Issue.time_estimate);
            ''',
            [3000],
        )
        await self.assert_query_result(
            r'''
                SELECT min(<int64>Issue.number);
            ''',
            [1],
        )
    async def test_edgeql_functions_min_03(self):
        # Objects are valid inputs to "min" and are ordered by their .id.
        await self.assert_query_result(
            r'''
                SELECT min(User).id = min(User.id);
            ''',
            [True],
        )
    async def test_edgeql_functions_max_01(self):
        """max() over scalar, array, and date/time sets.

        Empty input yields an empty set; arrays compare element-wise;
        date/time values compare chronologically.
        """
        await self.assert_query_result(
            r'''SELECT max(<int64>{});''',
            [],
        )
        await self.assert_query_result(
            r'''SELECT max(4);''',
            [4],
        )
        await self.assert_query_result(
            r'''SELECT max({10, 20, -3, 4});''',
            [20],
        )
        await self.assert_query_result(
            r'''SELECT max({10, 2.5, -3.1, 4});''',
            [10],
        )
        # Strings compare lexicographically, not numerically.
        await self.assert_query_result(
            r'''SELECT max({'10', '20', '-3', '4'});''',
            ['4'],
        )
        await self.assert_query_result(
            r'''SELECT max({'10', 'hello', 'world', '-3', '4'});''',
            ['world'],
        )
        await self.assert_query_result(
            r'''SELECT max({'hello', 'world'});''',
            ['world'],
        )
        await self.assert_query_result(
            r'''SELECT max({[1, 2], [3, 4]});''',
            [[3, 4]],
        )
        await self.assert_query_result(
            r'''SELECT max({[1, 2], [3, 4], <array<int64>>[]});''',
            [[3, 4]],
        )
        await self.assert_query_result(
            r'''SELECT max({[1, 2], [1, 0.4]});''',
            [[1, 2]],
        )
        # datetime comparison respects time zone offsets; the result is
        # rendered in UTC.
        await self.assert_query_result(
            r'''
                SELECT <str>max(<datetime>{
                    '2018-05-07T15:01:22.306916-05',
                    '2017-05-07T16:01:22.306916-05',
                    '2017-01-07T11:01:22.306916-05',
                    '2018-01-07T11:12:22.306916-05',
                });
            ''',
            ['2018-05-07T20:01:22.306916+00:00'],
        )
        await self.assert_query_result(
            r'''
                SELECT <str>max(<cal::local_datetime>{
                    '2018-05-07T15:01:22.306916',
                    '2017-05-07T16:01:22.306916',
                    '2017-01-07T11:01:22.306916',
                    '2018-01-07T11:12:22.306916',
                });
            ''',
            ['2018-05-07T15:01:22.306916'],
        )
        await self.assert_query_result(
            r'''
                SELECT <str>max(<cal::local_date>{
                    '2018-05-07',
                    '2017-05-07',
                    '2017-01-07',
                    '2018-01-07',
                });
            ''',
            ['2018-05-07'],
        )
        await self.assert_query_result(
            r'''
                SELECT <str>max(<cal::local_time>{
                    '15:01:22',
                    '16:01:22',
                    '11:01:22',
                    '11:12:22',
                });
            ''',
            ['16:01:22'],
        )
        await self.assert_query_result(
            r'''
                SELECT <str>max(<duration>{
                    '15:01:22',
                    '16:01:22',
                    '11:01:22',
                    '11:12:22',
                });
            ''',
            ['PT16H1M22S'],
        )
    async def test_edgeql_functions_max_02(self):
        """max() applied to scalar sets derived from schema objects."""
        await self.assert_query_result(
            r'''
                SELECT max(User.name);
            ''',
            ['Yury'],
        )
        await self.assert_query_result(
            r'''
                SELECT max(Issue.time_estimate);
            ''',
            [3000],
        )
        await self.assert_query_result(
            r'''
                SELECT max(<int64>Issue.number);
            ''',
            [4],
        )
    async def test_edgeql_functions_max_03(self):
        """max() accepts whole objects; object ordering is by .id."""
        # Objects are valid inputs to "max" and are ordered by their .id.
        await self.assert_query_result(
            r'''
                SELECT max(User).id = max(User.id);
            ''',
            [True],
        )
async def test_edgeql_functions_all_01(self):
await self.assert_query_result(
r'''SELECT all(<bool>{});''',
[True],
)
await self.assert_query_result(
r'''SELECT all({True});''',
[True],
)
await self.assert_query_result(
r'''SELECT all({False});''',
[False],
)
await self.assert_query_result(
r'''SELECT all({True, False, True, False});''',
[False],
)
await self.assert_query_result(
r'''SELECT all({1, 2, 3, 4} > 0);''',
[True],
)
await self.assert_query_result(
r'''SELECT all({1, -2, 3, 4} > 0);''',
[False],
)
await self.assert_query_result(
r'''SELECT all({0, -1, -2, -3} > 0);''',
[False],
)
await self.assert_query_result(
r'''SELECT all({1, -2, 3, 4} IN {-2, -1, 0, 1, 2, 3, 4});''',
[True],
)
await self.assert_query_result(
r'''SELECT all(<int64>{} IN {-2, -1, 0, 1, 2, 3, 4});''',
[True],
)
await self.assert_query_result(
r'''SELECT all({1, -2, 3, 4} IN <int64>{});''',
[False],
)
await self.assert_query_result(
r'''SELECT all(<int64>{} IN <int64>{});''',
[True],
)
    async def test_edgeql_functions_all_02(self):
        """all() over expressions computed from schema objects."""
        await self.assert_query_result(
            r'''
                SELECT all(len(User.name) = 4);
            ''',
            [False],
        )
        await self.assert_query_result(
            r'''
                SELECT all(
                    (
                        FOR I IN {Issue}
                        UNION EXISTS I.time_estimate
                    )
                );
            ''',
            [False],
        )
        await self.assert_query_result(
            r'''
                SELECT all(Issue.number != '');
            ''',
            [True],
        )
async def test_edgeql_functions_any_01(self):
await self.assert_query_result(
r'''SELECT any(<bool>{});''',
[False],
)
await self.assert_query_result(
r'''SELECT any({True});''',
[True],
)
await self.assert_query_result(
r'''SELECT any({False});''',
[False],
)
await self.assert_query_result(
r'''SELECT any({True, False, True, False});''',
[True],
)
await self.assert_query_result(
r'''SELECT any({1, 2, 3, 4} > 0);''',
[True],
)
await self.assert_query_result(
r'''SELECT any({1, -2, 3, 4} > 0);''',
[True],
)
await self.assert_query_result(
r'''SELECT any({0, -1, -2, -3} > 0);''',
[False],
)
await self.assert_query_result(
r'''SELECT any({1, -2, 3, 4} IN {-2, -1, 0, 1, 2, 3, 4});''',
[True],
)
await self.assert_query_result(
r'''SELECT any(<int64>{} IN {-2, -1, 0, 1, 2, 3, 4});''',
[False],
)
await self.assert_query_result(
r'''SELECT any({1, -2, 3, 4} IN <int64>{});''',
[False],
)
await self.assert_query_result(
r'''SELECT any(<int64>{} IN <int64>{});''',
[False],
)
    async def test_edgeql_functions_any_02(self):
        """any() over expressions computed from schema objects."""
        await self.assert_query_result(
            r'''
                SELECT any(len(User.name) = 4);
            ''',
            [True],
        )
        await self.assert_query_result(
            r'''
                SELECT any(
                    (
                        FOR I IN {Issue}
                        UNION EXISTS I.time_estimate
                    )
                );
            ''',
            [True],
        )
        await self.assert_query_result(
            r'''
                SELECT any(Issue.number != '');
            ''',
            [True],
        )
    async def test_edgeql_functions_any_03(self):
        """any(p) is equivalent to NOT all(NOT p) (De Morgan duality)."""
        await self.assert_query_result(
            r'''
                SELECT any(len(User.name) = 4) =
                    NOT all(NOT (len(User.name) = 4));
            ''',
            [True],
        )
        await self.assert_query_result(
            r'''
                SELECT any(
                    (
                        FOR I IN {Issue}
                        UNION EXISTS I.time_estimate
                    )
                ) = NOT all(
                    (
                        FOR I IN {Issue}
                        UNION NOT EXISTS I.time_estimate
                    )
                );
            ''',
            [True],
        )
        await self.assert_query_result(
            r'''
                SELECT any(Issue.number != '') = NOT all(Issue.number = '');
            ''',
            [True],
        )
async def test_edgeql_functions_round_01(self):
await self.assert_query_result(
r'''SELECT round(<float64>{});''',
[],
)
await self.assert_query_result(
r'''SELECT round(<float64>1);''',
[1],
)
await self.assert_query_result(
r'''SELECT round(<decimal>1);''',
[1],
)
await self.assert_query_result(
r'''SELECT round(<float64>1.2);''',
[1],
)
await self.assert_query_result(
r'''SELECT round(<float64>-1.2);''',
[-1],
)
await self.assert_query_result(
r'''SELECT round(<decimal>1.2);''',
[1],
)
await self.assert_query_result(
r'''SELECT round(<decimal>-1.2);''',
[-1],
)
await self.assert_query_result(
r'''SELECT round(<float64>-2.5);''',
[-2],
)
await self.assert_query_result(
r'''SELECT round(<float64>-1.5);''',
[-2],
)
await self.assert_query_result(
r'''SELECT round(<float64>-0.5);''',
[0],
)
await self.assert_query_result(
r'''SELECT round(<float64>0.5);''',
[0],
)
await self.assert_query_result(
r'''SELECT round(<float64>1.5);''',
[2],
)
await self.assert_query_result(
r'''SELECT round(<float64>2.5);''',
[2],
)
await self.assert_query_result(
r'''SELECT round(<decimal>-2.5);''',
[-3],
)
await self.assert_query_result(
r'''SELECT round(<decimal>-1.5);''',
[-2],
)
await self.assert_query_result(
r'''SELECT round(<decimal>-0.5);''',
[-1],
)
await self.assert_query_result(
r'''SELECT round(<decimal>0.5);''',
[1],
)
await self.assert_query_result(
r'''SELECT round(<decimal>1.5);''',
[2]
)
await self.assert_query_result(
r'''SELECT round(<decimal>2.5);''',
[3]
)
async def test_edgeql_functions_round_02(self):
await self.assert_query_result(
r'''SELECT round(1) IS int64;''',
[True],
)
await self.assert_query_result(
r'''SELECT round(<float32>1.2) IS float64;''',
[True],
)
await self.assert_query_result(
r'''SELECT round(<float64>1.2) IS float64;''',
[True],
)
await self.assert_query_result(
r'''SELECT round(1.2) IS float64;''',
[True],
)
await self.assert_query_result(
r'''SELECT round(<bigint>1) IS bigint;''',
[True],
)
await self.assert_query_result(
r'''SELECT round(<decimal>1.2) IS decimal;''',
[True],
)
# rounding to a specified decimal place is only defined
# for decimals
await self.assert_query_result(
r'''SELECT round(<decimal>1.2, 0) IS decimal;''',
[True],
)
async def test_edgeql_functions_round_03(self):
await self.assert_query_result(
r'''SELECT round(<decimal>123.456, 10);''',
[123.456],
)
await self.assert_query_result(
r'''SELECT round(<decimal>123.456, 3);''',
[123.456],
)
await self.assert_query_result(
r'''SELECT round(<decimal>123.456, 2);''',
[123.46],
)
await self.assert_query_result(
r'''SELECT round(<decimal>123.456, 1);''',
[123.5],
)
await self.assert_query_result(
r'''SELECT round(<decimal>123.456, 0);''',
[123],
)
await self.assert_query_result(
r'''SELECT round(<decimal>123.456, -1);''',
[120],
)
await self.assert_query_result(
r'''SELECT round(<decimal>123.456, -2);''',
[100],
)
await self.assert_query_result(
r'''SELECT round(<decimal>123.456, -3);''',
[0],
)
    async def test_edgeql_functions_round_04(self):
        """round() halfway behavior differs between float64 and decimal
        when applied to computed object properties."""
        await self.assert_query_result(
            r'''
                SELECT _ := round(<int64>Issue.number / 2)
                ORDER BY _;
            ''',
            [0, 1, 2, 2],
        )
        await self.assert_query_result(
            r'''
                SELECT _ := round(<decimal>Issue.number / 2)
                ORDER BY _;
            ''',
            [1, 1, 2, 2],
        )
async def test_edgeql_functions_contains_01(self):
await self.assert_query_result(
r'''SELECT std::contains(<array<int64>>[], {1, 3});''',
[False, False],
)
await self.assert_query_result(
r'''SELECT contains([1], {1, 3});''',
[True, False],
)
await self.assert_query_result(
r'''SELECT contains([1, 2], 1);''',
[True],
)
await self.assert_query_result(
r'''SELECT contains([1, 2], 3);''',
[False],
)
await self.assert_query_result(
r'''SELECT contains(['a'], <str>{});''',
[],
)
    async def test_edgeql_functions_contains_02(self):
        """contains() on an array bound through a WITH alias."""
        await self.assert_query_result(
            r'''
                WITH x := [3, 1, 2]
                SELECT contains(x, 2);
            ''',
            [True],
        )
        await self.assert_query_result(
            r'''
                WITH x := [3, 1, 2]
                SELECT contains(x, 5);
            ''',
            [False],
        )
        # NOTE(review): this repeats the previous assertion verbatim;
        # possibly a different needle was intended — confirm.
        await self.assert_query_result(
            r'''
                WITH x := [3, 1, 2]
                SELECT contains(x, 5);
            ''',
            [False],
        )
async def test_edgeql_functions_contains_03(self):
await self.assert_query_result(
r'''SELECT contains(<str>{}, <str>{});''',
[],
)
await self.assert_query_result(
r'''SELECT contains(<str>{}, 'a');''',
[],
)
await self.assert_query_result(
r'''SELECT contains('qwerty', <str>{});''',
[],
)
await self.assert_query_result(
r'''SELECT contains('qwerty', '');''',
{True},
)
await self.assert_query_result(
r'''SELECT contains('qwerty', 'q');''',
{True},
)
await self.assert_query_result(
r'''SELECT contains('qwerty', 'qwe');''',
{True},
)
await self.assert_query_result(
r'''SELECT contains('qwerty', 'we');''',
{True},
)
await self.assert_query_result(
r'''SELECT contains('qwerty', 't');''',
{True},
)
await self.assert_query_result(
r'''SELECT contains('qwerty', 'a');''',
{False},
)
await self.assert_query_result(
r'''SELECT contains('qwerty', 'azerty');''',
{False},
)
async def test_edgeql_functions_contains_04(self):
await self.assert_query_result(
r'''SELECT contains(<bytes>{}, <bytes>{});''',
[],
)
await self.assert_query_result(
r'''SELECT contains(<bytes>{}, b'a');''',
[],
)
await self.assert_query_result(
r'''SELECT contains(b'qwerty', <bytes>{});''',
[],
)
await self.assert_query_result(
r'''SELECT contains(b'qwerty', b't');''',
{True},
)
await self.assert_query_result(
r'''SELECT contains(b'qwerty', b'a');''',
{False},
)
await self.assert_query_result(
r'''SELECT contains(b'qwerty', b'azerty');''',
{False},
)
    async def test_edgeql_functions_contains_05(self):
        """contains() works on arrays of objects (array_agg result)."""
        await self.assert_query_result(
            r'''
                SELECT contains(
                    array_agg(User),
                    (SELECT User FILTER .name = 'Elvis')
                )
            ''',
            [True],
        )
async def test_edgeql_functions_find_01(self):
await self.assert_query_result(
r'''SELECT find(<str>{}, <str>{});''',
[],
)
await self.assert_query_result(
r'''SELECT find(<str>{}, 'a');''',
[],
)
await self.assert_query_result(
r'''SELECT find('qwerty', <str>{});''',
[],
)
await self.assert_query_result(
r'''SELECT find('qwerty', '');''',
{0},
)
await self.assert_query_result(
r'''SELECT find('qwerty', 'q');''',
{0},
)
await self.assert_query_result(
r'''SELECT find('qwerty', 'qwe');''',
{0},
)
await self.assert_query_result(
r'''SELECT find('qwerty', 'we');''',
{1},
)
await self.assert_query_result(
r'''SELECT find('qwerty', 't');''',
{4},
)
await self.assert_query_result(
r'''SELECT find('qwerty', 'a');''',
{-1},
)
await self.assert_query_result(
r'''SELECT find('qwerty', 'azerty');''',
{-1},
)
async def test_edgeql_functions_find_02(self):
await self.assert_query_result(
r'''SELECT find(<bytes>{}, <bytes>{});''',
[],
)
await self.assert_query_result(
r'''SELECT find(b'qwerty', b'');''',
{0},
)
await self.assert_query_result(
r'''SELECT find(b'qwerty', b'qwe');''',
{0},
)
await self.assert_query_result(
r'''SELECT find(b'qwerty', b'a');''',
{-1},
)
    async def test_edgeql_functions_find_03(self):
        """find() in arrays: element index or -1; optional start index.

        Empty-set arguments propagate to an empty result, while an
        empty *array* haystack yields -1.
        """
        await self.assert_query_result(
            r'''SELECT find(<array<str>>{}, <str>{});''',
            [],
        )
        await self.assert_query_result(
            r'''SELECT find(<array<str>>{}, 'the');''',
            [],
        )
        await self.assert_query_result(
            r'''SELECT find(['the', 'quick', 'brown', 'fox'], <str>{});''',
            [],
        )
        await self.assert_query_result(
            r'''SELECT find(<array<str>>[], 'the');''',
            {-1},
        )
        await self.assert_query_result(
            r'''SELECT find(['the', 'quick', 'brown', 'fox'], 'the');''',
            {0},
        )
        await self.assert_query_result(
            r'''SELECT find(['the', 'quick', 'brown', 'fox'], 'fox');''',
            {3},
        )
        await self.assert_query_result(
            r'''SELECT find(['the', 'quick', 'brown', 'fox'], 'jumps');''',
            {-1},
        )
        await self.assert_query_result(
            r'''
                SELECT find(['the', 'quick', 'brown', 'fox',
                             'jumps', 'over', 'the', 'lazy', 'dog'],
                            'the');
            ''',
            {0},
        )
        # The third argument starts the search past the first match.
        await self.assert_query_result(
            r'''
                SELECT find(['the', 'quick', 'brown', 'fox',
                             'jumps', 'over', 'the', 'lazy', 'dog'],
                            'the', 1);
            ''',
            {6},
        )
async def test_edgeql_functions_str_case_01(self):
await self.assert_query_result(
r'''SELECT str_lower({'HeLlO', 'WoRlD!', 'ПриВет', 'мИр'});''',
{'hello', 'world!', 'привет', 'мир'},
)
await self.assert_query_result(
r'''SELECT str_upper({'HeLlO', 'WoRlD!'});''',
{'HELLO', 'WORLD!'},
)
await self.assert_query_result(
r'''SELECT str_title({'HeLlO', 'WoRlD!'});''',
{'Hello', 'World!'},
)
await self.assert_query_result(
r'''SELECT str_lower('HeLlO WoRlD!');''',
{'hello world!'},
)
await self.assert_query_result(
r'''SELECT str_upper('HeLlO WoRlD!');''',
{'HELLO WORLD!'},
)
await self.assert_query_result(
r'''SELECT str_title('HeLlO WoRlD!');''',
{'Hello World!'},
)
async def test_edgeql_functions_str_pad_01(self):
await self.assert_query_result(
r'''SELECT str_pad_start('Hello', 20);''',
{' Hello'},
)
await self.assert_query_result(
r'''SELECT str_pad_start('Hello', 20, '>');''',
{'>>>>>>>>>>>>>>>Hello'},
)
await self.assert_query_result(
r'''SELECT str_pad_start('Hello', 20, '-->');''',
{'-->-->-->-->-->Hello'},
)
await self.assert_query_result(
r'''SELECT str_pad_end('Hello', 20);''',
{'Hello '},
)
await self.assert_query_result(
r'''SELECT str_pad_end('Hello', 20, '<');''',
{'Hello<<<<<<<<<<<<<<<'},
)
await self.assert_query_result(
r'''SELECT str_pad_end('Hello', 20, '<--');''',
{'Hello<--<--<--<--<--'},
)
# Call deprecated functions, too.
await self.assert_query_result(
r'''SELECT str_lpad('Hello', 20);''',
{' Hello'},
)
await self.assert_query_result(
r'''SELECT str_rpad('Hello', 20);''',
{'Hello '},
)
async def test_edgeql_functions_str_pad_02(self):
await self.assert_query_result(
r'''SELECT str_pad_start('Hello', 2);''',
{'He'},
)
await self.assert_query_result(
r'''SELECT str_pad_start('Hello', 2, '>');''',
{'He'},
)
await self.assert_query_result(
r'''SELECT str_pad_start('Hello', 2, '-->');''',
{'He'},
)
await self.assert_query_result(
r'''SELECT str_pad_end('Hello', 2);''',
{'He'},
)
await self.assert_query_result(
r'''SELECT str_pad_end('Hello', 2, '<');''',
{'He'},
)
await self.assert_query_result(
r'''SELECT str_pad_end('Hello', 2, '<--');''',
{'He'},
)
    async def test_edgeql_functions_str_pad_03(self):
        """The padded result length always equals the requested length,
        whether the input is padded or truncated."""
        await self.assert_query_result(
            r'''
                WITH l := {0, 2, 10, 20}
                SELECT len(str_pad_start('Hello', l)) = l;
            ''',
            [True, True, True, True],
        )
        await self.assert_query_result(
            r'''
                WITH l := {0, 2, 10, 20}
                SELECT len(str_pad_end('Hello', l)) = l;
            ''',
            [True, True, True, True],
        )
    async def test_edgeql_functions_str_trim_01(self):
        """str_trim/str_trim_start/str_trim_end strip whitespace from
        both ends, the start, or the end respectively."""
        await self.assert_query_result(
            r'''SELECT str_trim('    Hello    ');''',
            {'Hello'},
        )
        await self.assert_query_result(
            r'''SELECT str_trim_start('    Hello    ');''',
            {'Hello    '},
        )
        await self.assert_query_result(
            r'''SELECT str_trim_end('    Hello    ');''',
            {'    Hello'},
        )
        # Call deprecated functions, too.
        await self.assert_query_result(
            r'''SELECT str_ltrim('    Hello    ');''',
            {'Hello    '},
        )
        await self.assert_query_result(
            r'''SELECT str_rtrim('    Hello    ');''',
            {'    Hello'},
        )
    async def test_edgeql_functions_str_trim_02(self):
        """Trimming with an explicit set of characters to strip;
        the inputs mirror the str_pad_* outputs tested above."""
        await self.assert_query_result(
            r'''SELECT str_trim_start('               Hello', ' <->');''',
            {'Hello'},
        )
        await self.assert_query_result(
            r'''SELECT str_trim_start('>>>>>>>>>>>>>>>Hello', ' <->');''',
            {'Hello'},
        )
        await self.assert_query_result(
            r'''SELECT str_trim_start('-->-->-->-->-->Hello', ' <->');''',
            {'Hello'},
        )
        await self.assert_query_result(
            r'''SELECT str_trim_end('Hello               ', ' <->');''',
            {'Hello'},
        )
        await self.assert_query_result(
            r'''SELECT str_trim_end('Hello<<<<<<<<<<<<<<<', ' <->');''',
            {'Hello'},
        )
        await self.assert_query_result(
            r'''SELECT str_trim_end('Hello<--<--<--<--<--', ' <->');''',
            {'Hello'},
        )
        await self.assert_query_result(
            r'''
                SELECT str_trim(
                    '-->-->-->-->-->Hello<--<--<--<--<--', ' <->');
            ''',
            {'Hello'},
        )
async def test_edgeql_functions_str_repeat_01(self):
await self.assert_query_result(
r'''SELECT str_repeat('', 1);''',
{''},
)
await self.assert_query_result(
r'''SELECT str_repeat('', 0);''',
{''},
)
await self.assert_query_result(
r'''SELECT str_repeat('', -1);''',
{''},
)
await self.assert_query_result(
r'''SELECT str_repeat('a', 1);''',
{'a'},
)
await self.assert_query_result(
r'''SELECT str_repeat('aa', 3);''',
{'aaaaaa'},
)
await self.assert_query_result(
r'''SELECT str_repeat('a', 0);''',
{''},
)
await self.assert_query_result(
r'''SELECT str_repeat('', -1);''',
{''},
)
async def test_edgeql_functions_math_abs_01(self):
await self.assert_query_result(
r'''SELECT math::abs(2);''',
{2},
)
await self.assert_query_result(
r'''SELECT math::abs(-2);''',
{2},
)
await self.assert_query_result(
r'''SELECT math::abs(2.5);''',
{2.5},
)
await self.assert_query_result(
r'''SELECT math::abs(-2.5);''',
{2.5},
)
await self.assert_query_result(
r'''SELECT math::abs(<decimal>2.5);''',
{2.5},
)
await self.assert_query_result(
r'''SELECT math::abs(<decimal>-2.5);''',
{2.5},
)
async def test_edgeql_functions_math_abs_02(self):
await self.assert_query_result(
r'''SELECT math::abs(<int16>2) IS int16;''',
{True},
)
await self.assert_query_result(
r'''SELECT math::abs(<int32>2) IS int32;''',
{True},
)
await self.assert_query_result(
r'''SELECT math::abs(<int64>2) IS int64;''',
{True},
)
await self.assert_query_result(
r'''SELECT math::abs(<float32>2) IS float32;''',
{True},
)
await self.assert_query_result(
r'''SELECT math::abs(<float64>2) IS float64;''',
{True},
)
await self.assert_query_result(
r'''SELECT math::abs(<decimal>2) IS decimal;''',
{True},
)
async def test_edgeql_functions_math_ceil_01(self):
await self.assert_query_result(
r'''SELECT math::ceil(2);''',
{2},
)
await self.assert_query_result(
r'''SELECT math::ceil(2.5);''',
{3},
)
await self.assert_query_result(
r'''SELECT math::ceil(-2.5);''',
{-2},
)
await self.assert_query_result(
r'''SELECT math::ceil(<decimal>2.5);''',
{3},
)
await self.assert_query_result(
r'''SELECT math::ceil(<decimal>-2.5);''',
{-2},
)
async def test_edgeql_functions_math_ceil_02(self):
await self.assert_query_result(
r'''SELECT math::ceil(<int16>2) IS int64;''',
{True},
)
await self.assert_query_result(
r'''SELECT math::ceil(<int32>2) IS int64;''',
{True},
)
await self.assert_query_result(
r'''SELECT math::ceil(<int64>2) IS int64;''',
{True},
)
await self.assert_query_result(
r'''SELECT math::ceil(<float32>2.5) IS float64;''',
{True},
)
await self.assert_query_result(
r'''SELECT math::ceil(<float64>2.5) IS float64;''',
{True},
)
await self.assert_query_result(
r'''SELECT math::ceil(<bigint>2) IS bigint;''',
{True},
)
await self.assert_query_result(
r'''SELECT math::ceil(<decimal>2.5) IS decimal;''',
{True},
)
async def test_edgeql_functions_math_floor_01(self):
await self.assert_query_result(
r'''SELECT math::floor(2);''',
{2},
)
await self.assert_query_result(
r'''SELECT math::floor(2.5);''',
{2},
)
await self.assert_query_result(
r'''SELECT math::floor(-2.5);''',
{-3},
)
await self.assert_query_result(
r'''SELECT math::floor(<decimal>2.5);''',
{2},
)
await self.assert_query_result(
r'''SELECT math::floor(<decimal>-2.5);''',
{-3},
)
async def test_edgeql_functions_math_floor_02(self):
await self.assert_query_result(
r'''SELECT math::floor(<int16>2) IS int64;''',
{True},
)
await self.assert_query_result(
r'''SELECT math::floor(<int32>2) IS int64;''',
{True},
)
await self.assert_query_result(
r'''SELECT math::floor(<int64>2) IS int64;''',
{True},
)
await self.assert_query_result(
r'''SELECT math::floor(<float32>2.5) IS float64;''',
{True},
)
await self.assert_query_result(
r'''SELECT math::floor(<float64>2.5) IS float64;''',
{True},
)
await self.assert_query_result(
r'''SELECT math::floor(<bigint>2) IS bigint;''',
{True},
)
await self.assert_query_result(
r'''SELECT math::floor(<decimal>2.5) IS decimal;''',
{True},
)
async def test_edgeql_functions_math_log_01(self):
await self.assert_query_result(
r'''SELECT math::ln({1, 10, 32});''',
{0, 2.30258509299405, 3.46573590279973},
)
await self.assert_query_result(
r'''SELECT math::lg({1, 10, 32});''',
{0, 1, 1.50514997831991},
)
await self.assert_query_result(
r'''SELECT math::log(<decimal>{1, 10, 32}, base := <decimal>2);''',
{0, 3.321928094887362, 5},
)
async def test_edgeql_functions_math_mean_01(self):
await self.assert_query_result(
r'''SELECT math::mean(1);''',
{1.0},
)
await self.assert_query_result(
r'''SELECT math::mean(1.5);''',
{1.5},
)
await self.assert_query_result(
r'''SELECT math::mean({1, 2, 3});''',
{2.0},
)
await self.assert_query_result(
r'''SELECT math::mean({1, 2, 3, 4});''',
{2.5},
)
await self.assert_query_result(
r'''SELECT math::mean({0.1, 0.2, 0.3});''',
{0.2},
)
await self.assert_query_result(
r'''SELECT math::mean({0.1, 0.2, 0.3, 0.4});''',
{0.25},
)
async def test_edgeql_functions_math_mean_02(self):
# int16 is implicitly cast in float32, which produces a
# float64 result
await self.assert_query_result(
r'''SELECT math::mean(<int16>2) IS float64;''',
{True},
)
await self.assert_query_result(
r'''SELECT math::mean(<int32>2) IS float64;''',
{True},
)
await self.assert_query_result(
r'''SELECT math::mean(<int64>2) IS float64;''',
{True},
)
await self.assert_query_result(
r'''SELECT math::mean(<float32>2) IS float64;''',
{True},
)
await self.assert_query_result(
r'''SELECT math::mean(<float64>2) IS float64;''',
{True},
)
await self.assert_query_result(
r'''SELECT math::mean(<decimal>2) IS decimal;''',
{True},
)
    async def test_edgeql_functions_math_mean_03(self):
        """sum(A) and count(A) * mean(A) agree within float tolerance."""
        await self.assert_query_result(
            r'''
                WITH
                    MODULE math,
                    A := {1, 3, 1}
                # the difference between sum and mean * count is due to
                # rounding errors, but it should be small
                SELECT abs(sum(A) - count(A) * mean(A)) < 1e-10;
            ''',
            {True},
        )
    async def test_edgeql_functions_math_mean_04(self):
        """Same sum/mean consistency check as mean_03, but with an
        explicit float64 input set."""
        await self.assert_query_result(
            r'''
                WITH
                    MODULE math,
                    A := <float64>{1, 3, 1}
                # the difference between sum and mean * count is due to
                # rounding errors, but it should be small
                SELECT abs(sum(A) - count(A) * mean(A)) < 1e-10;
            ''',
            {True},
        )
    async def test_edgeql_functions_math_mean_05(self):
        """Sum/mean consistency over values computed from schema objects."""
        await self.assert_query_result(
            r'''
                WITH
                    MODULE math,
                    A := len(default::Named.name)
                # the difference between sum and mean * count is due to
                # rounding errors, but it should be small
                SELECT abs(sum(A) - count(A) * mean(A)) < 1e-10;
            ''',
            {True},
        )
    async def test_edgeql_functions_math_mean_06(self):
        """Same as mean_05, but with the lengths cast to float64."""
        await self.assert_query_result(
            r'''
                WITH
                    MODULE math,
                    A := <float64>len(default::Named.name)
                # the difference between sum and mean * count is due to
                # rounding errors, but it should be small
                SELECT abs(sum(A) - count(A) * mean(A)) < 1e-10;
            ''',
            {True},
        )
    async def test_edgeql_functions_math_mean_07(self):
        """mean() of a singleton set times its count is the element."""
        await self.assert_query_result(
            r'''
                WITH
                    MODULE math,
                    A := {3}
                SELECT mean(A) * count(A);
            ''',
            {3},
        )
    async def test_edgeql_functions_math_mean_08(self):
        """mean(X) equals sum(X) / count(X) exactly for these inputs."""
        await self.assert_query_result(
            r'''
                WITH
                    MODULE math,
                    X := {1, 2, 3, 4}
                SELECT mean(X) = sum(X) / count(X);
            ''',
            {True},
        )
        await self.assert_query_result(
            r'''
                WITH
                    MODULE math,
                    X := {0.1, 0.2, 0.3, 0.4}
                SELECT mean(X) = sum(X) / count(X);
            ''',
            {True},
        )
    async def test_edgeql_functions_math_mean_09(self):
        """mean() of an empty set raises InvalidValueError."""
        with self.assertRaisesRegex(
                edgedb.InvalidValueError,
                r"invalid input to mean\(\): "
                r"not enough elements in input set"):
            await self.con.query(r'''
                SELECT math::mean(<int64>{});
            ''')
async def test_edgeql_functions_math_stddev_01(self):
await self.assert_query_result(
r'''SELECT math::stddev({1, 1});''',
{0},
)
await self.assert_query_result(
r'''SELECT math::stddev({1, 1, -1, 1});''',
{1.0},
)
await self.assert_query_result(
r'''SELECT math::stddev({1, 2, 3});''',
{1.0},
)
await self.assert_query_result(
r'''SELECT math::stddev({0.1, 0.1, -0.1, 0.1});''',
{0.1},
)
await self.assert_query_result(
r'''SELECT math::stddev(<decimal>{0.1, 0.2, 0.3});''',
{0.1},
)
async def test_edgeql_functions_math_stddev_02(self):
await self.assert_query_result(
r'''SELECT math::stddev(<int16>{1, 1}) IS float64;''',
{True},
)
await self.assert_query_result(
r'''SELECT math::stddev(<int32>{1, 1}) IS float64;''',
{True},
)
await self.assert_query_result(
r'''SELECT math::stddev(<int64>{1, 1}) IS float64;''',
{True},
)
await self.assert_query_result(
r'''SELECT math::stddev(<float32>{1, 1}) IS float64;''',
{True},
)
await self.assert_query_result(
r'''SELECT math::stddev(<float64>{1, 1}) IS float64;''',
{True},
)
await self.assert_query_result(
r'''SELECT math::stddev(<decimal>{1, 1}) IS decimal;''',
{True},
)
    async def test_edgeql_functions_math_stddev_03(self):
        """stddev() of an empty set raises InvalidValueError."""
        with self.assertRaisesRegex(
                edgedb.InvalidValueError,
                r"invalid input to stddev\(\): not enough "
                r"elements in input set"):
            await self.con.query(r'''
                SELECT math::stddev(<int64>{});
            ''')
    async def test_edgeql_functions_math_stddev_04(self):
        """Sample stddev() needs at least two elements; a singleton
        raises InvalidValueError."""
        with self.assertRaisesRegex(
                edgedb.InvalidValueError,
                r"invalid input to stddev\(\): not enough "
                r"elements in input set"):
            await self.con.query(r'''
                SELECT math::stddev(1);
            ''')
async def test_edgeql_functions_math_stddev_pop_01(self):
await self.assert_query_result(
r'''SELECT math::stddev_pop(1);''',
{0.0},
)
await self.assert_query_result(
r'''SELECT math::stddev_pop({1, 1, 1});''',
{0.0},
)
await self.assert_query_result(
r'''SELECT math::stddev_pop({1, 2, 1, 2});''',
{0.5},
)
await self.assert_query_result(
r'''SELECT math::stddev_pop({0.1, 0.1, 0.1});''',
{0.0},
)
await self.assert_query_result(
r'''SELECT math::stddev_pop({0.1, 0.2, 0.1, 0.2});''',
{0.05},
)
async def test_edgeql_functions_math_stddev_pop_02(self):
await self.assert_query_result(
r'''SELECT math::stddev_pop(<int16>1) IS float64;''',
{True},
)
await self.assert_query_result(
r'''SELECT math::stddev_pop(<int32>1) IS float64;''',
{True},
)
await self.assert_query_result(
r'''SELECT math::stddev_pop(<int64>1) IS float64;''',
{True},
)
await self.assert_query_result(
r'''SELECT math::stddev_pop(<float32>1) IS float64;''',
{True},
)
await self.assert_query_result(
r'''SELECT math::stddev_pop(<float64>1) IS float64;''',
{True},
)
await self.assert_query_result(
r'''SELECT math::stddev_pop(<decimal>1) IS decimal;''',
{True},
)
    async def test_edgeql_functions_math_stddev_pop_04(self):
        """stddev_pop() of an empty set raises InvalidValueError."""
        with self.assertRaisesRegex(
                edgedb.InvalidValueError,
                r"invalid input to stddev_pop\(\): not enough "
                r"elements in input set"):
            await self.con.query(r'''
                SELECT math::stddev_pop(<int64>{});
            ''')
async def test_edgeql_functions_math_var_01(self):
await self.assert_query_result(
r'''SELECT math::var({1, 1});''',
{0},
)
await self.assert_query_result(
r'''SELECT math::var({1, 1, -1, 1});''',
{1.0},
)
await self.assert_query_result(
r'''SELECT math::var({1, 2, 3});''',
{1.0},
)
await self.assert_query_result(
r'''SELECT math::var({0.1, 0.1, -0.1, 0.1});''',
{0.01},
)
await self.assert_query_result(
r'''SELECT math::var(<decimal>{0.1, 0.2, 0.3});''',
{0.01},
)
async def test_edgeql_functions_math_var_02(self):
# int16 is implicitly cast in float32, which produces a
# float64 result
await self.assert_query_result(
r'''SELECT math::var(<int16>{1, 1}) IS float64;''',
{True},
)
await self.assert_query_result(
r'''SELECT math::var(<int32>{1, 1}) IS float64;''',
{True},
)
await self.assert_query_result(
r'''SELECT math::var(<int64>{1, 1}) IS float64;''',
{True},
)
await self.assert_query_result(
r'''SELECT math::var(<float32>{1, 1}) IS float64;''',
{True},
)
await self.assert_query_result(
r'''SELECT math::var(<float64>{1, 1}) IS float64;''',
{True},
)
await self.assert_query_result(
r'''SELECT math::var(<decimal>{1, 1}) IS decimal;''',
{True},
)
    async def test_edgeql_functions_math_var_03(self):
        """var(X) equals stddev(X) squared for a variety of inputs."""
        await self.assert_query_result(
            r'''
                WITH
                    MODULE math,
                    X := {1, 1}
                SELECT var(X) = stddev(X) ^ 2;
            ''',
            {True},
        )
        await self.assert_query_result(
            r'''
                WITH
                    MODULE math,
                    X := {1, 1, -1, 1}
                SELECT var(X) = stddev(X) ^ 2;
            ''',
            {True},
        )
        await self.assert_query_result(
            r'''
                WITH
                    MODULE math,
                    X := {1, 2, 3}
                SELECT var(X) = stddev(X) ^ 2;
            ''',
            {True},
        )
        await self.assert_query_result(
            r'''
                WITH
                    MODULE math,
                    X := {0.1, 0.1, -0.1, 0.1}
                SELECT var(X) = stddev(X) ^ 2;
            ''',
            {True},
        )
        await self.assert_query_result(
            r'''
                WITH
                    MODULE math,
                    X := <decimal>{0.1, 0.2, 0.3}
                SELECT var(X) = stddev(X) ^ 2;
            ''',
            {True},
        )
    async def test_edgeql_functions_math_var_04(self):
        """var() of an empty set raises InvalidValueError."""
        with self.assertRaisesRegex(
                edgedb.InvalidValueError,
                r"invalid input to var\(\): not enough "
                r"elements in input set"):
            await self.con.query(r'''
                SELECT math::var(<int64>{});
            ''')
    async def test_edgeql_functions_math_var_05(self):
        """Sample var() needs at least two elements; a singleton raises
        InvalidValueError."""
        with self.assertRaisesRegex(
                edgedb.InvalidValueError,
                r"invalid input to var\(\): not enough "
                r"elements in input set"):
            await self.con.query(r'''
                SELECT math::var(1);
            ''')
async def test_edgeql_functions_math_var_pop_01(self):
await self.assert_query_result(
r'''SELECT math::var_pop(1);''',
{0.0},
)
await self.assert_query_result(
r'''SELECT math::var_pop({1, 1, 1});''',
{0.0},
)
await self.assert_query_result(
r'''SELECT math::var_pop({1, 2, 1, 2});''',
{0.25},
)
await self.assert_query_result(
r'''SELECT math::var_pop({0.1, 0.1, 0.1});''',
{0.0},
)
await self.assert_query_result(
r'''SELECT math::var_pop({0.1, 0.2, 0.1, 0.2});''',
{0.0025},
)
async def test_edgeql_functions_math_var_pop_02(self):
await self.assert_query_result(
r'''SELECT math::var_pop(<int16>1) IS float64;''',
{True},
)
await self.assert_query_result(
r'''SELECT math::var_pop(<int32>1) IS float64;''',
{True},
)
await self.assert_query_result(
r'''SELECT math::var_pop(<int64>1) IS float64;''',
{True},
)
await self.assert_query_result(
r'''SELECT math::var_pop(<float32>1) IS float64;''',
{True},
)
await self.assert_query_result(
r'''SELECT math::var_pop(<float64>1) IS float64;''',
{True},
)
await self.assert_query_result(
r'''SELECT math::var_pop(<decimal>1) IS decimal;''',
{True},
)
    async def test_edgeql_functions_math_var_pop_03(self):
        """var_pop(X) equals stddev_pop(X) squared within float tolerance."""
        await self.assert_query_result(
            r'''
                WITH
                    MODULE math,
                    X := {1, 2, 1, 2}
                SELECT abs(var_pop(X) - stddev_pop(X) ^ 2) < 1.0e-15;
            ''',
            {True},
        )
        await self.assert_query_result(
            r'''
                WITH
                    MODULE math,
                    X := {0.1, 0.2, 0.1, 0.2}
                SELECT abs(var_pop(X) - stddev_pop(X) ^ 2) < 1.0e-15;
            ''',
            {True},
        )
    async def test_edgeql_functions_math_var_pop_04(self):
        """var_pop() of an empty set raises InvalidValueError."""
        with self.assertRaisesRegex(
                edgedb.InvalidValueError,
                r"invalid input to var_pop\(\): not enough "
                r"elements in input set"):
            await self.con.query(r'''
                SELECT math::var_pop(<int64>{});
            ''')
    async def test_edgeql_functions__genseries_01(self):
        """_gen_series: inclusive integer and bigint series with an
        optional step argument."""
        await self.assert_query_result(
            r'''
                SELECT _gen_series(1, 10)
            ''',
            [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
        )
        await self.assert_query_result(
            r'''
                SELECT _gen_series(1, 10, 2)
            ''',
            [1, 3, 5, 7, 9]
        )
        # Same series with bigint (the `n` suffix) arguments.
        await self.assert_query_result(
            r'''
                SELECT _gen_series(1n, 10n)
            ''',
            [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
        )
        await self.assert_query_result(
            r'''
                SELECT _gen_series(1n, 10n, 2n)
            ''',
            [1, 3, 5, 7, 9]
        )
async def test_edgeql_functions_sequence_next_reset(self):
await self.con.execute('''
CREATE SCALAR TYPE my_seq_01 EXTENDING std::sequence;
''')
result = await self.con.query_single('''
SELECT sequence_next(INTROSPECT my_seq_01)
''')
self.assertEqual(result, 1)
result = await self.con.query_single('''
SELECT sequence_next(INTROSPECT my_seq_01)
''')
self.assertEqual(result, 2)
await self.con.execute('''
SELECT sequence_reset(INTROSPECT my_seq_01)
''')
result = await self.con.query_single('''
SELECT sequence_next(INTROSPECT my_seq_01)
''')
self.assertEqual(result, 1)
await self.con.execute('''
SELECT sequence_reset(INTROSPECT my_seq_01, 20)
''')
result = await self.con.query_single('''
SELECT sequence_next(INTROSPECT my_seq_01)
''')
self.assertEqual(result, 21)
async def test_edgeql_functions__datetime_range_buckets(self):
await self.assert_query_result(
'''
SELECT <tuple<str, str>>std::_datetime_range_buckets(
<datetime>'2021-01-01T00:00:00Z',
<datetime>'2021-04-01T00:00:00Z',
'1 month');
''',
[
('2021-01-01T00:00:00+00:00', '2021-02-01T00:00:00+00:00'),
('2021-02-01T00:00:00+00:00', '2021-03-01T00:00:00+00:00'),
('2021-03-01T00:00:00+00:00', '2021-04-01T00:00:00+00:00'),
],
)
await self.assert_query_result(
'''
SELECT <tuple<str, str>>std::_datetime_range_buckets(
<datetime>'2021-04-01T00:00:00Z',
<datetime>'2021-04-01T00:00:00Z',
'1 month');
''',
[],
)
await self.assert_query_result(
'''
SELECT <tuple<str, str>>std::_datetime_range_buckets(
<datetime>'2021-01-01T00:00:00Z',
<datetime>'2021-04-01T00:00:00Z',
'1.5 months');
''',
[
('2021-01-01T00:00:00+00:00', '2021-02-16T00:00:00+00:00'),
('2021-02-16T00:00:00+00:00', '2021-03-31T00:00:00+00:00'),
],
)
| 29.901134
| 79
| 0.463155
| 14,907
| 142,449
| 4.228014
| 0.043939
| 0.102242
| 0.15184
| 0.202453
| 0.908547
| 0.899186
| 0.880575
| 0.835943
| 0.788963
| 0.730005
| 0
| 0.094596
| 0.391621
| 142,449
| 4,763
| 80
| 29.907411
| 0.632669
| 0.017866
| 0
| 0.644057
| 0
| 0.000306
| 0.10649
| 0.024934
| 0
| 0
| 0
| 0.00021
| 0.219982
| 1
| 0
| false
| 0
| 0.001833
| 0
| 0.00275
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0aaeec6476f8dee5f8e98ba3d7defabd378a4acc
| 472,279
|
py
|
Python
|
ofagent/indigo/submodules/loxigen-artifacts/pyloxi/loxi/of13/message.py
|
lanpinguo/apple-sauce
|
b16e7b78e58d0d17ad7f05476f38704a6b519ece
|
[
"Apache-2.0"
] | 1
|
2021-05-14T15:33:21.000Z
|
2021-05-14T15:33:21.000Z
|
ofagent/indigo/submodules/loxigen-artifacts/pyloxi/loxi/of13/message.py
|
lanpinguo/apple-sauce
|
b16e7b78e58d0d17ad7f05476f38704a6b519ece
|
[
"Apache-2.0"
] | null | null | null |
ofagent/indigo/submodules/loxigen-artifacts/pyloxi/loxi/of13/message.py
|
lanpinguo/apple-sauce
|
b16e7b78e58d0d17ad7f05476f38704a6b519ece
|
[
"Apache-2.0"
] | 2
|
2019-07-13T06:58:33.000Z
|
2022-03-23T03:02:57.000Z
|
# Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
# Copyright (c) 2011, 2012 Open Networking Foundation
# Copyright (c) 2012, 2013 Big Switch Networks, Inc.
# See the file LICENSE.pyloxi which should have been included in the source distribution
# Automatically generated by LOXI from template module.py
# Do not modify
import struct
import loxi
import const
import common
import action
import instruction
import oxm
import action_id
import instruction_id
import meter_band
import bsn_tlv
import util
import loxi.generic_util
class message(loxi.OFObject):
    """Base class for all OpenFlow 1.3 messages.

    Concrete message classes register themselves in ``subtypes`` keyed by
    their wire type code; ``unpack`` peeks at that code and dispatches to
    the registered subclass.

    NOTE(review): LOXI-generated Python 2 code -- ``pack`` joins
    ``struct.pack`` results with ``''.join`` (str == bytes on Python 2).
    Do not hand-edit; regenerate from the LOXI templates instead.
    """
    subtypes = {}  # wire type code -> concrete message subclass
    version = 4  # OpenFlow wire protocol version (4 == OpenFlow 1.3)
    def __init__(self, type=None, xid=None):
        # type defaults to 0; xid stays None when not supplied.
        if type != None:
            self.type = type
        else:
            self.type = 0
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        return
    def pack(self):
        """Serialize to wire format: version, type, length, xid."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        length = sum([len(x) for x in packed])
        # Patch the real total length into the placeholder at index 2.
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Deserialize from *reader*, dispatching on the type byte at offset 1."""
        subtype, = reader.peek('B', 1)
        subclass = message.subtypes.get(subtype)
        if subclass:
            return subclass.unpack(reader)
        obj = message()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        obj.type = reader.read("!B")[0]
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Restrict reads to this message's body: total length minus the
        # 4 header bytes (version, type, length) already consumed.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        return obj
    def __eq__(self, other):
        # Field-wise equality; the exact same class is required.
        if type(self) != type(other): return False
        if self.type != other.type: return False
        if self.xid != other.xid: return False
        return True
    def pretty_print(self, q):
        """Render a human-readable form via the pretty-printer *q*."""
        q.text("message {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
            q.breakable()
        q.text('}')
class stats_reply(message):
    """Base class for OpenFlow 1.3 stats replies (message type 19).

    Concrete stats replies register in ``subtypes`` keyed by the 16-bit
    stats_type field; ``unpack`` peeks at that field and dispatches.
    """
    subtypes = {}  # stats_type code -> concrete stats_reply subclass
    version = 4
    type = 19
    def __init__(self, xid=None, stats_type=None, flags=None):
        # Numeric fields default to 0; xid stays None when not supplied.
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if stats_type != None:
            self.stats_type = stats_type
        else:
            self.stats_type = 0
        if flags != None:
            self.flags = flags
        else:
            self.flags = 0
        return
    def pack(self):
        """Serialize: common header, stats_type, flags, 4 pad bytes."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.stats_type))
        packed.append(struct.pack("!H", self.flags))
        packed.append('\x00' * 4)
        length = sum([len(x) for x in packed])
        # Patch the real total length into the placeholder at index 2.
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Deserialize, dispatching on the stats_type field at byte offset 8."""
        subtype, = reader.peek('!H', 8)
        subclass = stats_reply.subtypes.get(subtype)
        if subclass:
            return subclass.unpack(reader)
        obj = stats_reply()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 19)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Limit reads to this message: length minus the 4 header bytes consumed.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        obj.stats_type = reader.read("!H")[0]
        obj.flags = reader.read("!H")[0]
        reader.skip(4)
        return obj
    def __eq__(self, other):
        # Field-wise equality; the exact same class is required.
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.stats_type != other.stats_type: return False
        if self.flags != other.flags: return False
        return True
    def pretty_print(self, q):
        """Render a human-readable form via the pretty-printer *q*."""
        q.text("stats_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("flags = ");
                q.text("%#x" % self.flags)
            q.breakable()
        q.text('}')
# Register with the message dispatcher under type code 19.
message.subtypes[19] = stats_reply
class aggregate_stats_reply(stats_reply):
    """Aggregate flow-stats reply (stats_type 2).

    Carries the aggregated packet_count / byte_count / flow_count totals.
    """
    version = 4
    type = 19
    stats_type = 2
    def __init__(self, xid=None, flags=None, packet_count=None, byte_count=None, flow_count=None):
        # Counters default to 0; xid stays None when not supplied.
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if flags != None:
            self.flags = flags
        else:
            self.flags = 0
        if packet_count != None:
            self.packet_count = packet_count
        else:
            self.packet_count = 0
        if byte_count != None:
            self.byte_count = byte_count
        else:
            self.byte_count = 0
        if flow_count != None:
            self.flow_count = flow_count
        else:
            self.flow_count = 0
        return
    def pack(self):
        """Serialize: stats header, then packet/byte counts (u64),
        flow count (u32) and 4 trailing pad bytes."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.stats_type))
        packed.append(struct.pack("!H", self.flags))
        packed.append('\x00' * 4)
        packed.append(struct.pack("!Q", self.packet_count))
        packed.append(struct.pack("!Q", self.byte_count))
        packed.append(struct.pack("!L", self.flow_count))
        packed.append('\x00' * 4)
        length = sum([len(x) for x in packed])
        # Patch the real total length into the placeholder at index 2.
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Deserialize an aggregate_stats_reply; asserts the fixed type codes."""
        obj = aggregate_stats_reply()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 19)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Limit reads to this message: length minus the 4 header bytes consumed.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _stats_type = reader.read("!H")[0]
        assert(_stats_type == 2)
        obj.flags = reader.read("!H")[0]
        reader.skip(4)
        obj.packet_count = reader.read("!Q")[0]
        obj.byte_count = reader.read("!Q")[0]
        obj.flow_count = reader.read("!L")[0]
        reader.skip(4)
        return obj
    def __eq__(self, other):
        # Field-wise equality; the exact same class is required.
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.flags != other.flags: return False
        if self.packet_count != other.packet_count: return False
        if self.byte_count != other.byte_count: return False
        if self.flow_count != other.flow_count: return False
        return True
    def pretty_print(self, q):
        """Render a human-readable form via the pretty-printer *q*."""
        q.text("aggregate_stats_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("flags = ");
                q.text("%#x" % self.flags)
                q.text(","); q.breakable()
                q.text("packet_count = ");
                q.text("%#x" % self.packet_count)
                q.text(","); q.breakable()
                q.text("byte_count = ");
                q.text("%#x" % self.byte_count)
                q.text(","); q.breakable()
                q.text("flow_count = ");
                q.text("%#x" % self.flow_count)
            q.breakable()
        q.text('}')
# Register with the stats_reply dispatcher under stats_type 2.
stats_reply.subtypes[2] = aggregate_stats_reply
class stats_request(message):
    """Base class for OpenFlow 1.3 stats requests (message type 18).

    Concrete stats requests register in ``subtypes`` keyed by the 16-bit
    stats_type field; ``unpack`` peeks at that field and dispatches.
    """
    subtypes = {}  # stats_type code -> concrete stats_request subclass
    version = 4
    type = 18
    def __init__(self, xid=None, stats_type=None, flags=None):
        # Numeric fields default to 0; xid stays None when not supplied.
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if stats_type != None:
            self.stats_type = stats_type
        else:
            self.stats_type = 0
        if flags != None:
            self.flags = flags
        else:
            self.flags = 0
        return
    def pack(self):
        """Serialize: common header, stats_type, flags, 4 pad bytes."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.stats_type))
        packed.append(struct.pack("!H", self.flags))
        packed.append('\x00' * 4)
        length = sum([len(x) for x in packed])
        # Patch the real total length into the placeholder at index 2.
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Deserialize, dispatching on the stats_type field at byte offset 8."""
        subtype, = reader.peek('!H', 8)
        subclass = stats_request.subtypes.get(subtype)
        if subclass:
            return subclass.unpack(reader)
        obj = stats_request()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 18)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Limit reads to this message: length minus the 4 header bytes consumed.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        obj.stats_type = reader.read("!H")[0]
        obj.flags = reader.read("!H")[0]
        reader.skip(4)
        return obj
    def __eq__(self, other):
        # Field-wise equality; the exact same class is required.
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.stats_type != other.stats_type: return False
        if self.flags != other.flags: return False
        return True
    def pretty_print(self, q):
        """Render a human-readable form via the pretty-printer *q*."""
        q.text("stats_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("flags = ");
                q.text("%#x" % self.flags)
            q.breakable()
        q.text('}')
# Register with the message dispatcher under type code 18.
message.subtypes[18] = stats_request
class aggregate_stats_request(stats_request):
    """Aggregate flow-stats request (stats_type 2).

    Selects the flows to aggregate by table_id, out_port, out_group,
    cookie/cookie_mask and a match structure.
    """
    version = 4
    type = 18
    stats_type = 2
    def __init__(self, xid=None, flags=None, table_id=None, out_port=None, out_group=None, cookie=None, cookie_mask=None, match=None):
        # Numeric fields default to 0; match defaults to an empty
        # common.match(); xid stays None when not supplied.
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if flags != None:
            self.flags = flags
        else:
            self.flags = 0
        if table_id != None:
            self.table_id = table_id
        else:
            self.table_id = 0
        if out_port != None:
            self.out_port = out_port
        else:
            self.out_port = 0
        if out_group != None:
            self.out_group = out_group
        else:
            self.out_group = 0
        if cookie != None:
            self.cookie = cookie
        else:
            self.cookie = 0
        if cookie_mask != None:
            self.cookie_mask = cookie_mask
        else:
            self.cookie_mask = 0
        if match != None:
            self.match = match
        else:
            self.match = common.match()
        return
    def pack(self):
        """Serialize: stats header, table_id (+3 pad), out_port, out_group
        (+4 pad), cookie, cookie_mask, then the packed match."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.stats_type))
        packed.append(struct.pack("!H", self.flags))
        packed.append('\x00' * 4)
        packed.append(struct.pack("!B", self.table_id))
        packed.append('\x00' * 3)
        packed.append(util.pack_port_no(self.out_port))
        packed.append(struct.pack("!L", self.out_group))
        packed.append('\x00' * 4)
        packed.append(struct.pack("!Q", self.cookie))
        packed.append(struct.pack("!Q", self.cookie_mask))
        packed.append(self.match.pack())
        length = sum([len(x) for x in packed])
        # Patch the real total length into the placeholder at index 2.
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Deserialize an aggregate_stats_request; asserts the fixed type codes."""
        obj = aggregate_stats_request()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 18)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Limit reads to this message: length minus the 4 header bytes consumed.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _stats_type = reader.read("!H")[0]
        assert(_stats_type == 2)
        obj.flags = reader.read("!H")[0]
        reader.skip(4)
        obj.table_id = reader.read("!B")[0]
        reader.skip(3)
        obj.out_port = util.unpack_port_no(reader)
        obj.out_group = reader.read("!L")[0]
        reader.skip(4)
        obj.cookie = reader.read("!Q")[0]
        obj.cookie_mask = reader.read("!Q")[0]
        obj.match = common.match.unpack(reader)
        return obj
    def __eq__(self, other):
        # Field-wise equality; the exact same class is required.
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.flags != other.flags: return False
        if self.table_id != other.table_id: return False
        if self.out_port != other.out_port: return False
        if self.out_group != other.out_group: return False
        if self.cookie != other.cookie: return False
        if self.cookie_mask != other.cookie_mask: return False
        if self.match != other.match: return False
        return True
    def pretty_print(self, q):
        """Render a human-readable form via the pretty-printer *q*."""
        q.text("aggregate_stats_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("flags = ");
                q.text("%#x" % self.flags)
                q.text(","); q.breakable()
                q.text("table_id = ");
                q.text("%#x" % self.table_id)
                q.text(","); q.breakable()
                q.text("out_port = ");
                q.text(util.pretty_port(self.out_port))
                q.text(","); q.breakable()
                q.text("out_group = ");
                q.text("%#x" % self.out_group)
                q.text(","); q.breakable()
                q.text("cookie = ");
                q.text("%#x" % self.cookie)
                q.text(","); q.breakable()
                q.text("cookie_mask = ");
                q.text("%#x" % self.cookie_mask)
                q.text(","); q.breakable()
                q.text("match = ");
                q.pp(self.match)
            q.breakable()
        q.text('}')
# Register with the stats_request dispatcher under stats_type 2.
stats_request.subtypes[2] = aggregate_stats_request
class async_get_reply(message):
    """Async-configuration get reply (message type 27).

    Carries six 32-bit event masks: packet-in, port-status and
    flow-removed masks, one each for the equal/master role and the
    slave role.
    """
    version = 4
    type = 27
    def __init__(self, xid=None, packet_in_mask_equal_master=None, packet_in_mask_slave=None, port_status_mask_equal_master=None, port_status_mask_slave=None, flow_removed_mask_equal_master=None, flow_removed_mask_slave=None):
        # All masks default to 0; xid stays None when not supplied.
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if packet_in_mask_equal_master != None:
            self.packet_in_mask_equal_master = packet_in_mask_equal_master
        else:
            self.packet_in_mask_equal_master = 0
        if packet_in_mask_slave != None:
            self.packet_in_mask_slave = packet_in_mask_slave
        else:
            self.packet_in_mask_slave = 0
        if port_status_mask_equal_master != None:
            self.port_status_mask_equal_master = port_status_mask_equal_master
        else:
            self.port_status_mask_equal_master = 0
        if port_status_mask_slave != None:
            self.port_status_mask_slave = port_status_mask_slave
        else:
            self.port_status_mask_slave = 0
        if flow_removed_mask_equal_master != None:
            self.flow_removed_mask_equal_master = flow_removed_mask_equal_master
        else:
            self.flow_removed_mask_equal_master = 0
        if flow_removed_mask_slave != None:
            self.flow_removed_mask_slave = flow_removed_mask_slave
        else:
            self.flow_removed_mask_slave = 0
        return
    def pack(self):
        """Serialize: common header followed by the six u32 masks."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!L", self.packet_in_mask_equal_master))
        packed.append(struct.pack("!L", self.packet_in_mask_slave))
        packed.append(struct.pack("!L", self.port_status_mask_equal_master))
        packed.append(struct.pack("!L", self.port_status_mask_slave))
        packed.append(struct.pack("!L", self.flow_removed_mask_equal_master))
        packed.append(struct.pack("!L", self.flow_removed_mask_slave))
        length = sum([len(x) for x in packed])
        # Patch the real total length into the placeholder at index 2.
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Deserialize an async_get_reply; asserts the fixed type code."""
        obj = async_get_reply()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 27)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Limit reads to this message: length minus the 4 header bytes consumed.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        obj.packet_in_mask_equal_master = reader.read("!L")[0]
        obj.packet_in_mask_slave = reader.read("!L")[0]
        obj.port_status_mask_equal_master = reader.read("!L")[0]
        obj.port_status_mask_slave = reader.read("!L")[0]
        obj.flow_removed_mask_equal_master = reader.read("!L")[0]
        obj.flow_removed_mask_slave = reader.read("!L")[0]
        return obj
    def __eq__(self, other):
        # Field-wise equality; the exact same class is required.
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.packet_in_mask_equal_master != other.packet_in_mask_equal_master: return False
        if self.packet_in_mask_slave != other.packet_in_mask_slave: return False
        if self.port_status_mask_equal_master != other.port_status_mask_equal_master: return False
        if self.port_status_mask_slave != other.port_status_mask_slave: return False
        if self.flow_removed_mask_equal_master != other.flow_removed_mask_equal_master: return False
        if self.flow_removed_mask_slave != other.flow_removed_mask_slave: return False
        return True
    def pretty_print(self, q):
        """Render a human-readable form via the pretty-printer *q*."""
        q.text("async_get_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("packet_in_mask_equal_master = ");
                q.text("%#x" % self.packet_in_mask_equal_master)
                q.text(","); q.breakable()
                q.text("packet_in_mask_slave = ");
                q.text("%#x" % self.packet_in_mask_slave)
                q.text(","); q.breakable()
                q.text("port_status_mask_equal_master = ");
                q.text("%#x" % self.port_status_mask_equal_master)
                q.text(","); q.breakable()
                q.text("port_status_mask_slave = ");
                q.text("%#x" % self.port_status_mask_slave)
                q.text(","); q.breakable()
                q.text("flow_removed_mask_equal_master = ");
                q.text("%#x" % self.flow_removed_mask_equal_master)
                q.text(","); q.breakable()
                q.text("flow_removed_mask_slave = ");
                q.text("%#x" % self.flow_removed_mask_slave)
            q.breakable()
        q.text('}')
# Register with the message dispatcher under type code 27.
message.subtypes[27] = async_get_reply
class async_get_request(message):
    """Async-configuration get request (message type 26).

    Same six u32 mask fields as async_get_reply; only the type code
    differs.
    """
    version = 4
    type = 26
    def __init__(self, xid=None, packet_in_mask_equal_master=None, packet_in_mask_slave=None, port_status_mask_equal_master=None, port_status_mask_slave=None, flow_removed_mask_equal_master=None, flow_removed_mask_slave=None):
        # All masks default to 0; xid stays None when not supplied.
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if packet_in_mask_equal_master != None:
            self.packet_in_mask_equal_master = packet_in_mask_equal_master
        else:
            self.packet_in_mask_equal_master = 0
        if packet_in_mask_slave != None:
            self.packet_in_mask_slave = packet_in_mask_slave
        else:
            self.packet_in_mask_slave = 0
        if port_status_mask_equal_master != None:
            self.port_status_mask_equal_master = port_status_mask_equal_master
        else:
            self.port_status_mask_equal_master = 0
        if port_status_mask_slave != None:
            self.port_status_mask_slave = port_status_mask_slave
        else:
            self.port_status_mask_slave = 0
        if flow_removed_mask_equal_master != None:
            self.flow_removed_mask_equal_master = flow_removed_mask_equal_master
        else:
            self.flow_removed_mask_equal_master = 0
        if flow_removed_mask_slave != None:
            self.flow_removed_mask_slave = flow_removed_mask_slave
        else:
            self.flow_removed_mask_slave = 0
        return
    def pack(self):
        """Serialize: common header followed by the six u32 masks."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!L", self.packet_in_mask_equal_master))
        packed.append(struct.pack("!L", self.packet_in_mask_slave))
        packed.append(struct.pack("!L", self.port_status_mask_equal_master))
        packed.append(struct.pack("!L", self.port_status_mask_slave))
        packed.append(struct.pack("!L", self.flow_removed_mask_equal_master))
        packed.append(struct.pack("!L", self.flow_removed_mask_slave))
        length = sum([len(x) for x in packed])
        # Patch the real total length into the placeholder at index 2.
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Deserialize an async_get_request; asserts the fixed type code."""
        obj = async_get_request()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 26)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Limit reads to this message: length minus the 4 header bytes consumed.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        obj.packet_in_mask_equal_master = reader.read("!L")[0]
        obj.packet_in_mask_slave = reader.read("!L")[0]
        obj.port_status_mask_equal_master = reader.read("!L")[0]
        obj.port_status_mask_slave = reader.read("!L")[0]
        obj.flow_removed_mask_equal_master = reader.read("!L")[0]
        obj.flow_removed_mask_slave = reader.read("!L")[0]
        return obj
    def __eq__(self, other):
        # Field-wise equality; the exact same class is required.
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.packet_in_mask_equal_master != other.packet_in_mask_equal_master: return False
        if self.packet_in_mask_slave != other.packet_in_mask_slave: return False
        if self.port_status_mask_equal_master != other.port_status_mask_equal_master: return False
        if self.port_status_mask_slave != other.port_status_mask_slave: return False
        if self.flow_removed_mask_equal_master != other.flow_removed_mask_equal_master: return False
        if self.flow_removed_mask_slave != other.flow_removed_mask_slave: return False
        return True
    def pretty_print(self, q):
        """Render a human-readable form via the pretty-printer *q*."""
        q.text("async_get_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("packet_in_mask_equal_master = ");
                q.text("%#x" % self.packet_in_mask_equal_master)
                q.text(","); q.breakable()
                q.text("packet_in_mask_slave = ");
                q.text("%#x" % self.packet_in_mask_slave)
                q.text(","); q.breakable()
                q.text("port_status_mask_equal_master = ");
                q.text("%#x" % self.port_status_mask_equal_master)
                q.text(","); q.breakable()
                q.text("port_status_mask_slave = ");
                q.text("%#x" % self.port_status_mask_slave)
                q.text(","); q.breakable()
                q.text("flow_removed_mask_equal_master = ");
                q.text("%#x" % self.flow_removed_mask_equal_master)
                q.text(","); q.breakable()
                q.text("flow_removed_mask_slave = ");
                q.text("%#x" % self.flow_removed_mask_slave)
            q.breakable()
        q.text('}')
# Register with the message dispatcher under type code 26.
message.subtypes[26] = async_get_request
class async_set(message):
    """Async-configuration set message (message type 28).

    Same six u32 mask fields as async_get_reply; only the type code
    differs.
    """
    version = 4
    type = 28
    def __init__(self, xid=None, packet_in_mask_equal_master=None, packet_in_mask_slave=None, port_status_mask_equal_master=None, port_status_mask_slave=None, flow_removed_mask_equal_master=None, flow_removed_mask_slave=None):
        # All masks default to 0; xid stays None when not supplied.
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if packet_in_mask_equal_master != None:
            self.packet_in_mask_equal_master = packet_in_mask_equal_master
        else:
            self.packet_in_mask_equal_master = 0
        if packet_in_mask_slave != None:
            self.packet_in_mask_slave = packet_in_mask_slave
        else:
            self.packet_in_mask_slave = 0
        if port_status_mask_equal_master != None:
            self.port_status_mask_equal_master = port_status_mask_equal_master
        else:
            self.port_status_mask_equal_master = 0
        if port_status_mask_slave != None:
            self.port_status_mask_slave = port_status_mask_slave
        else:
            self.port_status_mask_slave = 0
        if flow_removed_mask_equal_master != None:
            self.flow_removed_mask_equal_master = flow_removed_mask_equal_master
        else:
            self.flow_removed_mask_equal_master = 0
        if flow_removed_mask_slave != None:
            self.flow_removed_mask_slave = flow_removed_mask_slave
        else:
            self.flow_removed_mask_slave = 0
        return
    def pack(self):
        """Serialize: common header followed by the six u32 masks."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!L", self.packet_in_mask_equal_master))
        packed.append(struct.pack("!L", self.packet_in_mask_slave))
        packed.append(struct.pack("!L", self.port_status_mask_equal_master))
        packed.append(struct.pack("!L", self.port_status_mask_slave))
        packed.append(struct.pack("!L", self.flow_removed_mask_equal_master))
        packed.append(struct.pack("!L", self.flow_removed_mask_slave))
        length = sum([len(x) for x in packed])
        # Patch the real total length into the placeholder at index 2.
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Deserialize an async_set; asserts the fixed type code."""
        obj = async_set()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 28)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Limit reads to this message: length minus the 4 header bytes consumed.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        obj.packet_in_mask_equal_master = reader.read("!L")[0]
        obj.packet_in_mask_slave = reader.read("!L")[0]
        obj.port_status_mask_equal_master = reader.read("!L")[0]
        obj.port_status_mask_slave = reader.read("!L")[0]
        obj.flow_removed_mask_equal_master = reader.read("!L")[0]
        obj.flow_removed_mask_slave = reader.read("!L")[0]
        return obj
    def __eq__(self, other):
        # Field-wise equality; the exact same class is required.
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.packet_in_mask_equal_master != other.packet_in_mask_equal_master: return False
        if self.packet_in_mask_slave != other.packet_in_mask_slave: return False
        if self.port_status_mask_equal_master != other.port_status_mask_equal_master: return False
        if self.port_status_mask_slave != other.port_status_mask_slave: return False
        if self.flow_removed_mask_equal_master != other.flow_removed_mask_equal_master: return False
        if self.flow_removed_mask_slave != other.flow_removed_mask_slave: return False
        return True
    def pretty_print(self, q):
        """Render a human-readable form via the pretty-printer *q*."""
        q.text("async_set {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("packet_in_mask_equal_master = ");
                q.text("%#x" % self.packet_in_mask_equal_master)
                q.text(","); q.breakable()
                q.text("packet_in_mask_slave = ");
                q.text("%#x" % self.packet_in_mask_slave)
                q.text(","); q.breakable()
                q.text("port_status_mask_equal_master = ");
                q.text("%#x" % self.port_status_mask_equal_master)
                q.text(","); q.breakable()
                q.text("port_status_mask_slave = ");
                q.text("%#x" % self.port_status_mask_slave)
                q.text(","); q.breakable()
                q.text("flow_removed_mask_equal_master = ");
                q.text("%#x" % self.flow_removed_mask_equal_master)
                q.text(","); q.breakable()
                q.text("flow_removed_mask_slave = ");
                q.text("%#x" % self.flow_removed_mask_slave)
            q.breakable()
        q.text('}')
# Register with the message dispatcher under type code 28.
message.subtypes[28] = async_set
class error_msg(message):
    """Base class for OpenFlow 1.3 error messages (message type 1).

    Concrete error classes register in ``subtypes`` keyed by the 16-bit
    err_type field; ``unpack`` peeks at that field and dispatches.
    """
    subtypes = {}  # err_type code -> concrete error_msg subclass
    version = 4
    type = 1
    def __init__(self, xid=None, err_type=None):
        # err_type defaults to 0; xid stays None when not supplied.
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if err_type != None:
            self.err_type = err_type
        else:
            self.err_type = 0
        return
    def pack(self):
        """Serialize: common header followed by the u16 err_type."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.err_type))
        length = sum([len(x) for x in packed])
        # Patch the real total length into the placeholder at index 2.
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Deserialize, dispatching on the err_type field at byte offset 8."""
        subtype, = reader.peek('!H', 8)
        subclass = error_msg.subtypes.get(subtype)
        if subclass:
            return subclass.unpack(reader)
        obj = error_msg()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 1)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Limit reads to this message: length minus the 4 header bytes consumed.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        obj.err_type = reader.read("!H")[0]
        return obj
    def __eq__(self, other):
        # Field-wise equality; the exact same class is required.
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.err_type != other.err_type: return False
        return True
    def pretty_print(self, q):
        """Render a human-readable form via the pretty-printer *q*."""
        q.text("error_msg {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
            q.breakable()
        q.text('}')
# Register with the message dispatcher under type code 1.
message.subtypes[1] = error_msg
class bad_action_error_msg(error_msg):
    """Error message for a bad action (err_type 2).

    Carries a u16 error code plus the raw bytes of the offending request
    in ``data``.
    """
    version = 4
    type = 1
    err_type = 2
    def __init__(self, xid=None, code=None, data=None):
        # code defaults to 0; data defaults to the empty string.
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if code != None:
            self.code = code
        else:
            self.code = 0
        if data != None:
            self.data = data
        else:
            self.data = ''
        return
    def pack(self):
        """Serialize: header, err_type, code, then the raw data bytes."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.err_type))
        packed.append(struct.pack("!H", self.code))
        packed.append(self.data)
        length = sum([len(x) for x in packed])
        # Patch the real total length into the placeholder at index 2.
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Deserialize; ``data`` takes everything left in the message body."""
        obj = bad_action_error_msg()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 1)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Limit reads to this message: length minus the 4 header bytes consumed.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _err_type = reader.read("!H")[0]
        assert(_err_type == 2)
        obj.code = reader.read("!H")[0]
        obj.data = str(reader.read_all())
        return obj
    def __eq__(self, other):
        # Field-wise equality; the exact same class is required.
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.code != other.code: return False
        if self.data != other.data: return False
        return True
    def pretty_print(self, q):
        """Render a human-readable form via the pretty-printer *q*."""
        q.text("bad_action_error_msg {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("code = ");
                q.text("%#x" % self.code)
                q.text(","); q.breakable()
                q.text("data = ");
                q.pp(self.data)
            q.breakable()
        q.text('}')
# Register with the error_msg dispatcher under err_type 2.
error_msg.subtypes[2] = bad_action_error_msg
class bad_instruction_error_msg(error_msg):
    """Error message for a bad instruction (err_type 3).

    Carries a u16 error code plus the raw bytes of the offending request
    in ``data``.
    """
    version = 4
    type = 1
    err_type = 3
    def __init__(self, xid=None, code=None, data=None):
        # code defaults to 0; data defaults to the empty string.
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if code != None:
            self.code = code
        else:
            self.code = 0
        if data != None:
            self.data = data
        else:
            self.data = ''
        return
    def pack(self):
        """Serialize: header, err_type, code, then the raw data bytes."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.err_type))
        packed.append(struct.pack("!H", self.code))
        packed.append(self.data)
        length = sum([len(x) for x in packed])
        # Patch the real total length into the placeholder at index 2.
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Deserialize; ``data`` takes everything left in the message body."""
        obj = bad_instruction_error_msg()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 1)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Limit reads to this message: length minus the 4 header bytes consumed.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _err_type = reader.read("!H")[0]
        assert(_err_type == 3)
        obj.code = reader.read("!H")[0]
        obj.data = str(reader.read_all())
        return obj
    def __eq__(self, other):
        # Field-wise equality; the exact same class is required.
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.code != other.code: return False
        if self.data != other.data: return False
        return True
    def pretty_print(self, q):
        """Render a human-readable form via the pretty-printer *q*."""
        q.text("bad_instruction_error_msg {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("code = ");
                q.text("%#x" % self.code)
                q.text(","); q.breakable()
                q.text("data = ");
                q.pp(self.data)
            q.breakable()
        q.text('}')
# Register with the error_msg dispatcher under err_type 3.
error_msg.subtypes[3] = bad_instruction_error_msg
class bad_match_error_msg(error_msg):
    """Error message for a bad match (err_type 4).

    Carries a u16 error code plus the raw bytes of the offending request
    in ``data``.
    """
    version = 4
    type = 1
    err_type = 4
    def __init__(self, xid=None, code=None, data=None):
        # code defaults to 0; data defaults to the empty string.
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if code != None:
            self.code = code
        else:
            self.code = 0
        if data != None:
            self.data = data
        else:
            self.data = ''
        return
    def pack(self):
        """Serialize: header, err_type, code, then the raw data bytes."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.err_type))
        packed.append(struct.pack("!H", self.code))
        packed.append(self.data)
        length = sum([len(x) for x in packed])
        # Patch the real total length into the placeholder at index 2.
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Deserialize; ``data`` takes everything left in the message body."""
        obj = bad_match_error_msg()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 1)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Limit reads to this message: length minus the 4 header bytes consumed.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _err_type = reader.read("!H")[0]
        assert(_err_type == 4)
        obj.code = reader.read("!H")[0]
        obj.data = str(reader.read_all())
        return obj
    def __eq__(self, other):
        # Field-wise equality; the exact same class is required.
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.code != other.code: return False
        if self.data != other.data: return False
        return True
    def pretty_print(self, q):
        """Render a human-readable form via the pretty-printer *q*."""
        q.text("bad_match_error_msg {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("code = ");
                q.text("%#x" % self.code)
                q.text(","); q.breakable()
                q.text("data = ");
                q.pp(self.data)
            q.breakable()
        q.text('}')
# Register with the error_msg dispatcher under err_type 4.
error_msg.subtypes[4] = bad_match_error_msg
class bad_request_error_msg(error_msg):
    """OFPET_BAD_REQUEST error message (OpenFlow wire version 4)."""
    version = 4
    type = 1
    err_type = 1

    def __init__(self, xid=None, code=None, data=None):
        # Omitted arguments fall back to the wire-format defaults.
        self.xid = xid
        self.code = code if code is not None else 0
        self.data = data if data is not None else ''

    def pack(self):
        """Serialize to a wire-format string; the length slot is patched last."""
        parts = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder at index 2
            struct.pack("!L", self.xid),
            struct.pack("!H", self.err_type),
            struct.pack("!H", self.code),
            self.data,
        ]
        parts[2] = struct.pack("!H", sum(len(p) for p in parts))
        return ''.join(parts)

    @staticmethod
    def unpack(reader):
        """Decode one message from *reader*, checking the fixed header fields."""
        msg = bad_request_error_msg()
        wire_version = reader.read("!B")[0]
        assert(wire_version == 4)
        wire_type = reader.read("!B")[0]
        assert(wire_type == 1)
        msg_len = reader.read("!H")[0]
        # Restrict further reads to this message's remaining payload.
        reader = reader.slice(msg_len - (2 + 2))
        msg.xid = reader.read("!L")[0]
        wire_err_type = reader.read("!H")[0]
        assert(wire_err_type == 1)
        msg.code = reader.read("!H")[0]
        msg.data = str(reader.read_all())
        return msg

    def __eq__(self, other):
        return (type(self) == type(other)
                and self.xid == other.xid
                and self.code == other.code
                and self.data == other.data)

    def pretty_print(self, q):
        q.text("bad_request_error_msg {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid != None else 'None')
                q.text(",")
                q.breakable()
                q.text("code = ")
                q.text("%#x" % self.code)
                q.text(",")
                q.breakable()
                q.text("data = ")
                q.pp(self.data)
            q.breakable()
        q.text('}')

error_msg.subtypes[1] = bad_request_error_msg
class barrier_reply(message):
    """OFPT_BARRIER_REPLY message (OpenFlow wire version 4)."""
    version = 4
    type = 21

    def __init__(self, xid=None):
        self.xid = xid

    def pack(self):
        """Serialize to a wire-format string; the length slot is patched last."""
        parts = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder at index 2
            struct.pack("!L", self.xid),
        ]
        parts[2] = struct.pack("!H", sum(len(p) for p in parts))
        return ''.join(parts)

    @staticmethod
    def unpack(reader):
        """Decode one message from *reader*, checking the fixed header fields."""
        msg = barrier_reply()
        wire_version = reader.read("!B")[0]
        assert(wire_version == 4)
        wire_type = reader.read("!B")[0]
        assert(wire_type == 21)
        msg_len = reader.read("!H")[0]
        reader = reader.slice(msg_len - (2 + 2))
        msg.xid = reader.read("!L")[0]
        return msg

    def __eq__(self, other):
        return type(self) == type(other) and self.xid == other.xid

    def pretty_print(self, q):
        q.text("barrier_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid != None else 'None')
            q.breakable()
        q.text('}')

message.subtypes[21] = barrier_reply
class barrier_request(message):
    """OFPT_BARRIER_REQUEST message (OpenFlow wire version 4)."""
    version = 4
    type = 20

    def __init__(self, xid=None):
        self.xid = xid

    def pack(self):
        """Serialize to a wire-format string; the length slot is patched last."""
        parts = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder at index 2
            struct.pack("!L", self.xid),
        ]
        parts[2] = struct.pack("!H", sum(len(p) for p in parts))
        return ''.join(parts)

    @staticmethod
    def unpack(reader):
        """Decode one message from *reader*, checking the fixed header fields."""
        msg = barrier_request()
        wire_version = reader.read("!B")[0]
        assert(wire_version == 4)
        wire_type = reader.read("!B")[0]
        assert(wire_type == 20)
        msg_len = reader.read("!H")[0]
        reader = reader.slice(msg_len - (2 + 2))
        msg.xid = reader.read("!L")[0]
        return msg

    def __eq__(self, other):
        return type(self) == type(other) and self.xid == other.xid

    def pretty_print(self, q):
        q.text("barrier_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid != None else 'None')
            q.breakable()
        q.text('}')

message.subtypes[20] = barrier_request
class experimenter(message):
    """Generic OFPT_EXPERIMENTER message; dispatches decode to registered subclasses."""
    subtypes = {}
    version = 4
    type = 4

    def __init__(self, xid=None, experimenter=None, subtype=None, data=None):
        # Omitted arguments fall back to the wire-format defaults.
        self.xid = xid
        self.experimenter = experimenter if experimenter is not None else 0
        self.subtype = subtype if subtype is not None else 0
        self.data = data if data is not None else ''

    def pack(self):
        """Serialize to a wire-format string; the length slot is patched last."""
        parts = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder at index 2
            struct.pack("!L", self.xid),
            struct.pack("!L", self.experimenter),
            struct.pack("!L", self.subtype),
            self.data,
        ]
        parts[2] = struct.pack("!H", sum(len(p) for p in parts))
        return ''.join(parts)

    @staticmethod
    def unpack(reader):
        """Decode from *reader*, delegating to a registered subclass when one matches."""
        # The 32-bit word at offset 8 keys the subclass registry
        # (the generator binds it to the name "subtype").
        discriminator, = reader.peek('!L', 8)
        handler = experimenter.subtypes.get(discriminator)
        if handler:
            return handler.unpack(reader)
        msg = experimenter()
        wire_version = reader.read("!B")[0]
        assert(wire_version == 4)
        wire_type = reader.read("!B")[0]
        assert(wire_type == 4)
        msg_len = reader.read("!H")[0]
        reader = reader.slice(msg_len - (2 + 2))
        msg.xid = reader.read("!L")[0]
        msg.experimenter = reader.read("!L")[0]
        msg.subtype = reader.read("!L")[0]
        msg.data = str(reader.read_all())
        return msg

    def __eq__(self, other):
        return (type(self) == type(other)
                and self.xid == other.xid
                and self.experimenter == other.experimenter
                and self.subtype == other.subtype
                and self.data == other.data)

    def pretty_print(self, q):
        q.text("experimenter {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid != None else 'None')
                q.text(",")
                q.breakable()
                q.text("subtype = ")
                q.text("%#x" % self.subtype)
                q.text(",")
                q.breakable()
                q.text("data = ")
                q.pp(self.data)
            q.breakable()
        q.text('}')

message.subtypes[4] = experimenter
class bsn_header(experimenter):
    """Base experimenter message for id 6035143; dispatches decode by subtype."""
    subtypes = {}
    version = 4
    type = 4
    experimenter = 6035143

    def __init__(self, xid=None, subtype=None):
        self.xid = xid
        self.subtype = subtype if subtype is not None else 0

    def pack(self):
        """Serialize to a wire-format string; the length slot is patched last."""
        parts = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder at index 2
            struct.pack("!L", self.xid),
            struct.pack("!L", self.experimenter),
            struct.pack("!L", self.subtype),
        ]
        parts[2] = struct.pack("!H", sum(len(p) for p in parts))
        return ''.join(parts)

    @staticmethod
    def unpack(reader):
        """Decode from *reader*, delegating to a registered subclass when one matches."""
        # Peek the 32-bit subtype field at byte offset 12 to pick a subclass.
        discriminator, = reader.peek('!L', 12)
        handler = bsn_header.subtypes.get(discriminator)
        if handler:
            return handler.unpack(reader)
        msg = bsn_header()
        wire_version = reader.read("!B")[0]
        assert(wire_version == 4)
        wire_type = reader.read("!B")[0]
        assert(wire_type == 4)
        msg_len = reader.read("!H")[0]
        reader = reader.slice(msg_len - (2 + 2))
        msg.xid = reader.read("!L")[0]
        wire_experimenter = reader.read("!L")[0]
        assert(wire_experimenter == 6035143)
        msg.subtype = reader.read("!L")[0]
        return msg

    def __eq__(self, other):
        return (type(self) == type(other)
                and self.xid == other.xid
                and self.subtype == other.subtype)

    def pretty_print(self, q):
        q.text("bsn_header {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid != None else 'None')
            q.breakable()
        q.text('}')

experimenter.subtypes[6035143] = bsn_header
class bsn_arp_idle(bsn_header):
    """Experimenter subtype 60: carries a VLAN id and an IPv4 address."""
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 60

    def __init__(self, xid=None, vlan_vid=None, ipv4_addr=None):
        # Omitted arguments fall back to the wire-format defaults.
        self.xid = xid
        self.vlan_vid = vlan_vid if vlan_vid is not None else 0
        self.ipv4_addr = ipv4_addr if ipv4_addr is not None else 0

    def pack(self):
        """Serialize to a wire-format string; the length slot is patched last."""
        parts = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder at index 2
            struct.pack("!L", self.xid),
            struct.pack("!L", self.experimenter),
            struct.pack("!L", self.subtype),
            struct.pack("!H", self.vlan_vid),
            '\x00' * 2,  # 2 bytes of padding after vlan_vid
            struct.pack("!L", self.ipv4_addr),
        ]
        parts[2] = struct.pack("!H", sum(len(p) for p in parts))
        return ''.join(parts)

    @staticmethod
    def unpack(reader):
        """Decode one message from *reader*, checking the fixed header fields."""
        msg = bsn_arp_idle()
        wire_version = reader.read("!B")[0]
        assert(wire_version == 4)
        wire_type = reader.read("!B")[0]
        assert(wire_type == 4)
        msg_len = reader.read("!H")[0]
        reader = reader.slice(msg_len - (2 + 2))
        msg.xid = reader.read("!L")[0]
        wire_experimenter = reader.read("!L")[0]
        assert(wire_experimenter == 6035143)
        wire_subtype = reader.read("!L")[0]
        assert(wire_subtype == 60)
        msg.vlan_vid = reader.read("!H")[0]
        reader.skip(2)  # padding
        msg.ipv4_addr = reader.read("!L")[0]
        return msg

    def __eq__(self, other):
        return (type(self) == type(other)
                and self.xid == other.xid
                and self.vlan_vid == other.vlan_vid
                and self.ipv4_addr == other.ipv4_addr)

    def pretty_print(self, q):
        q.text("bsn_arp_idle {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid != None else 'None')
                q.text(",")
                q.breakable()
                q.text("vlan_vid = ")
                q.text("%#x" % self.vlan_vid)
                q.text(",")
                q.breakable()
                q.text("ipv4_addr = ")
                q.text(util.pretty_ipv4(self.ipv4_addr))
            q.breakable()
        q.text('}')

bsn_header.subtypes[60] = bsn_arp_idle
class bsn_bw_clear_data_reply(bsn_header):
    """Experimenter subtype 22: reply carrying a 32-bit status field."""
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 22

    def __init__(self, xid=None, status=None):
        self.xid = xid
        self.status = status if status is not None else 0

    def pack(self):
        """Serialize to a wire-format string; the length slot is patched last."""
        parts = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder at index 2
            struct.pack("!L", self.xid),
            struct.pack("!L", self.experimenter),
            struct.pack("!L", self.subtype),
            struct.pack("!L", self.status),
        ]
        parts[2] = struct.pack("!H", sum(len(p) for p in parts))
        return ''.join(parts)

    @staticmethod
    def unpack(reader):
        """Decode one message from *reader*, checking the fixed header fields."""
        msg = bsn_bw_clear_data_reply()
        wire_version = reader.read("!B")[0]
        assert(wire_version == 4)
        wire_type = reader.read("!B")[0]
        assert(wire_type == 4)
        msg_len = reader.read("!H")[0]
        reader = reader.slice(msg_len - (2 + 2))
        msg.xid = reader.read("!L")[0]
        wire_experimenter = reader.read("!L")[0]
        assert(wire_experimenter == 6035143)
        wire_subtype = reader.read("!L")[0]
        assert(wire_subtype == 22)
        msg.status = reader.read("!L")[0]
        return msg

    def __eq__(self, other):
        return (type(self) == type(other)
                and self.xid == other.xid
                and self.status == other.status)

    def pretty_print(self, q):
        q.text("bsn_bw_clear_data_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid != None else 'None')
                q.text(",")
                q.breakable()
                q.text("status = ")
                q.text("%#x" % self.status)
            q.breakable()
        q.text('}')

bsn_header.subtypes[22] = bsn_bw_clear_data_reply
class bsn_bw_clear_data_request(bsn_header):
    """Experimenter subtype 21: request with no payload beyond the header."""
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 21

    def __init__(self, xid=None):
        self.xid = xid

    def pack(self):
        """Serialize to a wire-format string; the length slot is patched last."""
        parts = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder at index 2
            struct.pack("!L", self.xid),
            struct.pack("!L", self.experimenter),
            struct.pack("!L", self.subtype),
        ]
        parts[2] = struct.pack("!H", sum(len(p) for p in parts))
        return ''.join(parts)

    @staticmethod
    def unpack(reader):
        """Decode one message from *reader*, checking the fixed header fields."""
        msg = bsn_bw_clear_data_request()
        wire_version = reader.read("!B")[0]
        assert(wire_version == 4)
        wire_type = reader.read("!B")[0]
        assert(wire_type == 4)
        msg_len = reader.read("!H")[0]
        reader = reader.slice(msg_len - (2 + 2))
        msg.xid = reader.read("!L")[0]
        wire_experimenter = reader.read("!L")[0]
        assert(wire_experimenter == 6035143)
        wire_subtype = reader.read("!L")[0]
        assert(wire_subtype == 21)
        return msg

    def __eq__(self, other):
        return type(self) == type(other) and self.xid == other.xid

    def pretty_print(self, q):
        q.text("bsn_bw_clear_data_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid != None else 'None')
            q.breakable()
        q.text('}')

bsn_header.subtypes[21] = bsn_bw_clear_data_request
class bsn_bw_enable_get_reply(bsn_header):
    """Experimenter subtype 20: reply carrying a 32-bit enabled flag."""
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 20

    def __init__(self, xid=None, enabled=None):
        self.xid = xid
        self.enabled = enabled if enabled is not None else 0

    def pack(self):
        """Serialize to a wire-format string; the length slot is patched last."""
        parts = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder at index 2
            struct.pack("!L", self.xid),
            struct.pack("!L", self.experimenter),
            struct.pack("!L", self.subtype),
            struct.pack("!L", self.enabled),
        ]
        parts[2] = struct.pack("!H", sum(len(p) for p in parts))
        return ''.join(parts)

    @staticmethod
    def unpack(reader):
        """Decode one message from *reader*, checking the fixed header fields."""
        msg = bsn_bw_enable_get_reply()
        wire_version = reader.read("!B")[0]
        assert(wire_version == 4)
        wire_type = reader.read("!B")[0]
        assert(wire_type == 4)
        msg_len = reader.read("!H")[0]
        reader = reader.slice(msg_len - (2 + 2))
        msg.xid = reader.read("!L")[0]
        wire_experimenter = reader.read("!L")[0]
        assert(wire_experimenter == 6035143)
        wire_subtype = reader.read("!L")[0]
        assert(wire_subtype == 20)
        msg.enabled = reader.read("!L")[0]
        return msg

    def __eq__(self, other):
        return (type(self) == type(other)
                and self.xid == other.xid
                and self.enabled == other.enabled)

    def pretty_print(self, q):
        q.text("bsn_bw_enable_get_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid != None else 'None')
                q.text(",")
                q.breakable()
                q.text("enabled = ")
                q.text("%#x" % self.enabled)
            q.breakable()
        q.text('}')

bsn_header.subtypes[20] = bsn_bw_enable_get_reply
class bsn_bw_enable_get_request(bsn_header):
    """Experimenter subtype 19: request with no payload beyond the header."""
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 19

    def __init__(self, xid=None):
        self.xid = xid

    def pack(self):
        """Serialize to a wire-format string; the length slot is patched last."""
        parts = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder at index 2
            struct.pack("!L", self.xid),
            struct.pack("!L", self.experimenter),
            struct.pack("!L", self.subtype),
        ]
        parts[2] = struct.pack("!H", sum(len(p) for p in parts))
        return ''.join(parts)

    @staticmethod
    def unpack(reader):
        """Decode one message from *reader*, checking the fixed header fields."""
        msg = bsn_bw_enable_get_request()
        wire_version = reader.read("!B")[0]
        assert(wire_version == 4)
        wire_type = reader.read("!B")[0]
        assert(wire_type == 4)
        msg_len = reader.read("!H")[0]
        reader = reader.slice(msg_len - (2 + 2))
        msg.xid = reader.read("!L")[0]
        wire_experimenter = reader.read("!L")[0]
        assert(wire_experimenter == 6035143)
        wire_subtype = reader.read("!L")[0]
        assert(wire_subtype == 19)
        return msg

    def __eq__(self, other):
        return type(self) == type(other) and self.xid == other.xid

    def pretty_print(self, q):
        q.text("bsn_bw_enable_get_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid != None else 'None')
            q.breakable()
        q.text('}')

bsn_header.subtypes[19] = bsn_bw_enable_get_request
class bsn_bw_enable_set_reply(bsn_header):
    """Experimenter subtype 23: reply echoing the enable flag plus a status."""
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 23

    def __init__(self, xid=None, enable=None, status=None):
        # Omitted arguments fall back to the wire-format defaults.
        self.xid = xid
        self.enable = enable if enable is not None else 0
        self.status = status if status is not None else 0

    def pack(self):
        """Serialize to a wire-format string; the length slot is patched last."""
        parts = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder at index 2
            struct.pack("!L", self.xid),
            struct.pack("!L", self.experimenter),
            struct.pack("!L", self.subtype),
            struct.pack("!L", self.enable),
            struct.pack("!L", self.status),
        ]
        parts[2] = struct.pack("!H", sum(len(p) for p in parts))
        return ''.join(parts)

    @staticmethod
    def unpack(reader):
        """Decode one message from *reader*, checking the fixed header fields."""
        msg = bsn_bw_enable_set_reply()
        wire_version = reader.read("!B")[0]
        assert(wire_version == 4)
        wire_type = reader.read("!B")[0]
        assert(wire_type == 4)
        msg_len = reader.read("!H")[0]
        reader = reader.slice(msg_len - (2 + 2))
        msg.xid = reader.read("!L")[0]
        wire_experimenter = reader.read("!L")[0]
        assert(wire_experimenter == 6035143)
        wire_subtype = reader.read("!L")[0]
        assert(wire_subtype == 23)
        msg.enable = reader.read("!L")[0]
        msg.status = reader.read("!L")[0]
        return msg

    def __eq__(self, other):
        return (type(self) == type(other)
                and self.xid == other.xid
                and self.enable == other.enable
                and self.status == other.status)

    def pretty_print(self, q):
        q.text("bsn_bw_enable_set_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid != None else 'None')
                q.text(",")
                q.breakable()
                q.text("enable = ")
                q.text("%#x" % self.enable)
                q.text(",")
                q.breakable()
                q.text("status = ")
                q.text("%#x" % self.status)
            q.breakable()
        q.text('}')

bsn_header.subtypes[23] = bsn_bw_enable_set_reply
class bsn_bw_enable_set_request(bsn_header):
    """Experimenter subtype 18: request carrying a 32-bit enable flag."""
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 18

    def __init__(self, xid=None, enable=None):
        self.xid = xid
        self.enable = enable if enable is not None else 0

    def pack(self):
        """Serialize to a wire-format string; the length slot is patched last."""
        parts = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder at index 2
            struct.pack("!L", self.xid),
            struct.pack("!L", self.experimenter),
            struct.pack("!L", self.subtype),
            struct.pack("!L", self.enable),
        ]
        parts[2] = struct.pack("!H", sum(len(p) for p in parts))
        return ''.join(parts)

    @staticmethod
    def unpack(reader):
        """Decode one message from *reader*, checking the fixed header fields."""
        msg = bsn_bw_enable_set_request()
        wire_version = reader.read("!B")[0]
        assert(wire_version == 4)
        wire_type = reader.read("!B")[0]
        assert(wire_type == 4)
        msg_len = reader.read("!H")[0]
        reader = reader.slice(msg_len - (2 + 2))
        msg.xid = reader.read("!L")[0]
        wire_experimenter = reader.read("!L")[0]
        assert(wire_experimenter == 6035143)
        wire_subtype = reader.read("!L")[0]
        assert(wire_subtype == 18)
        msg.enable = reader.read("!L")[0]
        return msg

    def __eq__(self, other):
        return (type(self) == type(other)
                and self.xid == other.xid
                and self.enable == other.enable)

    def pretty_print(self, q):
        q.text("bsn_bw_enable_set_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid != None else 'None')
                q.text(",")
                q.breakable()
                q.text("enable = ")
                q.text("%#x" % self.enable)
            q.breakable()
        q.text('}')

bsn_header.subtypes[18] = bsn_bw_enable_set_request
class bsn_controller_connections_reply(bsn_header):
    """Experimenter subtype 57: reply carrying a list of controller connections."""
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 57

    def __init__(self, xid=None, connections=None):
        self.xid = xid
        self.connections = connections if connections is not None else []

    def pack(self):
        """Serialize to a wire-format string; the length slot is patched last."""
        parts = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder at index 2
            struct.pack("!L", self.xid),
            struct.pack("!L", self.experimenter),
            struct.pack("!L", self.subtype),
            loxi.generic_util.pack_list(self.connections),
        ]
        parts[2] = struct.pack("!H", sum(len(p) for p in parts))
        return ''.join(parts)

    @staticmethod
    def unpack(reader):
        """Decode one message from *reader*, checking the fixed header fields."""
        msg = bsn_controller_connections_reply()
        wire_version = reader.read("!B")[0]
        assert(wire_version == 4)
        wire_type = reader.read("!B")[0]
        assert(wire_type == 4)
        msg_len = reader.read("!H")[0]
        reader = reader.slice(msg_len - (2 + 2))
        msg.xid = reader.read("!L")[0]
        wire_experimenter = reader.read("!L")[0]
        assert(wire_experimenter == 6035143)
        wire_subtype = reader.read("!L")[0]
        assert(wire_subtype == 57)
        msg.connections = loxi.generic_util.unpack_list(reader, common.bsn_controller_connection.unpack)
        return msg

    def __eq__(self, other):
        return (type(self) == type(other)
                and self.xid == other.xid
                and self.connections == other.connections)

    def pretty_print(self, q):
        q.text("bsn_controller_connections_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid != None else 'None')
                q.text(",")
                q.breakable()
                q.text("connections = ")
                q.pp(self.connections)
            q.breakable()
        q.text('}')

bsn_header.subtypes[57] = bsn_controller_connections_reply
class bsn_controller_connections_request(bsn_header):
    """Experimenter subtype 56: request with no payload beyond the header."""
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 56

    def __init__(self, xid=None):
        self.xid = xid

    def pack(self):
        """Serialize to a wire-format string; the length slot is patched last."""
        parts = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder at index 2
            struct.pack("!L", self.xid),
            struct.pack("!L", self.experimenter),
            struct.pack("!L", self.subtype),
        ]
        parts[2] = struct.pack("!H", sum(len(p) for p in parts))
        return ''.join(parts)

    @staticmethod
    def unpack(reader):
        """Decode one message from *reader*, checking the fixed header fields."""
        msg = bsn_controller_connections_request()
        wire_version = reader.read("!B")[0]
        assert(wire_version == 4)
        wire_type = reader.read("!B")[0]
        assert(wire_type == 4)
        msg_len = reader.read("!H")[0]
        reader = reader.slice(msg_len - (2 + 2))
        msg.xid = reader.read("!L")[0]
        wire_experimenter = reader.read("!L")[0]
        assert(wire_experimenter == 6035143)
        wire_subtype = reader.read("!L")[0]
        assert(wire_subtype == 56)
        return msg

    def __eq__(self, other):
        return type(self) == type(other) and self.xid == other.xid

    def pretty_print(self, q):
        q.text("bsn_controller_connections_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid != None else 'None')
            q.breakable()
        q.text('}')

bsn_header.subtypes[56] = bsn_controller_connections_request
class experimenter_stats_reply(stats_reply):
    """Generic experimenter stats reply; dispatches decode to registered subclasses."""
    subtypes = {}
    version = 4
    type = 19
    stats_type = 65535

    def __init__(self, xid=None, flags=None, experimenter=None, subtype=None):
        # Omitted arguments fall back to the wire-format defaults.
        self.xid = xid
        self.flags = flags if flags is not None else 0
        self.experimenter = experimenter if experimenter is not None else 0
        self.subtype = subtype if subtype is not None else 0

    def pack(self):
        """Serialize to a wire-format string; the length slot is patched last."""
        parts = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder at index 2
            struct.pack("!L", self.xid),
            struct.pack("!H", self.stats_type),
            struct.pack("!H", self.flags),
            '\x00' * 4,  # 4 bytes of padding after flags
            struct.pack("!L", self.experimenter),
            struct.pack("!L", self.subtype),
        ]
        parts[2] = struct.pack("!H", sum(len(p) for p in parts))
        return ''.join(parts)

    @staticmethod
    def unpack(reader):
        """Decode from *reader*, delegating to a registered subclass when one matches."""
        # The 32-bit word at offset 16 is the experimenter id, which keys
        # the subclass registry (the generator binds it to "subtype").
        discriminator, = reader.peek('!L', 16)
        handler = experimenter_stats_reply.subtypes.get(discriminator)
        if handler:
            return handler.unpack(reader)
        msg = experimenter_stats_reply()
        wire_version = reader.read("!B")[0]
        assert(wire_version == 4)
        wire_type = reader.read("!B")[0]
        assert(wire_type == 19)
        msg_len = reader.read("!H")[0]
        reader = reader.slice(msg_len - (2 + 2))
        msg.xid = reader.read("!L")[0]
        wire_stats_type = reader.read("!H")[0]
        assert(wire_stats_type == 65535)
        msg.flags = reader.read("!H")[0]
        reader.skip(4)  # padding
        msg.experimenter = reader.read("!L")[0]
        msg.subtype = reader.read("!L")[0]
        return msg

    def __eq__(self, other):
        return (type(self) == type(other)
                and self.xid == other.xid
                and self.flags == other.flags
                and self.experimenter == other.experimenter
                and self.subtype == other.subtype)

    def pretty_print(self, q):
        q.text("experimenter_stats_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid != None else 'None')
                q.text(",")
                q.breakable()
                q.text("flags = ")
                q.text("%#x" % self.flags)
                q.text(",")
                q.breakable()
                q.text("subtype = ")
                q.text("%#x" % self.subtype)
            q.breakable()
        q.text('}')

stats_reply.subtypes[65535] = experimenter_stats_reply
class bsn_stats_reply(experimenter_stats_reply):
    """Stats reply for experimenter id 6035143; dispatches decode by subtype."""
    subtypes = {}
    version = 4
    type = 19
    stats_type = 65535
    experimenter = 6035143

    def __init__(self, xid=None, flags=None, subtype=None):
        # Omitted arguments fall back to the wire-format defaults.
        self.xid = xid
        self.flags = flags if flags is not None else 0
        self.subtype = subtype if subtype is not None else 0

    def pack(self):
        """Serialize to a wire-format string; the length slot is patched last."""
        parts = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder at index 2
            struct.pack("!L", self.xid),
            struct.pack("!H", self.stats_type),
            struct.pack("!H", self.flags),
            '\x00' * 4,  # 4 bytes of padding after flags
            struct.pack("!L", self.experimenter),
            struct.pack("!L", self.subtype),
        ]
        parts[2] = struct.pack("!H", sum(len(p) for p in parts))
        return ''.join(parts)

    @staticmethod
    def unpack(reader):
        """Decode from *reader*, delegating to a registered subclass when one matches."""
        # Peek the 32-bit subtype field at byte offset 20 to pick a subclass.
        discriminator, = reader.peek('!L', 20)
        handler = bsn_stats_reply.subtypes.get(discriminator)
        if handler:
            return handler.unpack(reader)
        msg = bsn_stats_reply()
        wire_version = reader.read("!B")[0]
        assert(wire_version == 4)
        wire_type = reader.read("!B")[0]
        assert(wire_type == 19)
        msg_len = reader.read("!H")[0]
        reader = reader.slice(msg_len - (2 + 2))
        msg.xid = reader.read("!L")[0]
        wire_stats_type = reader.read("!H")[0]
        assert(wire_stats_type == 65535)
        msg.flags = reader.read("!H")[0]
        reader.skip(4)  # padding
        wire_experimenter = reader.read("!L")[0]
        assert(wire_experimenter == 6035143)
        msg.subtype = reader.read("!L")[0]
        return msg

    def __eq__(self, other):
        return (type(self) == type(other)
                and self.xid == other.xid
                and self.flags == other.flags
                and self.subtype == other.subtype)

    def pretty_print(self, q):
        q.text("bsn_stats_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid != None else 'None')
                q.text(",")
                q.breakable()
                q.text("flags = ")
                q.text("%#x" % self.flags)
            q.breakable()
        q.text('}')

experimenter_stats_reply.subtypes[6035143] = bsn_stats_reply
class bsn_flow_checksum_bucket_stats_reply(bsn_stats_reply):
    """Stats subtype 10: reply carrying a list of flow-checksum-bucket entries."""
    version = 4
    type = 19
    stats_type = 65535
    experimenter = 6035143
    subtype = 10

    def __init__(self, xid=None, flags=None, entries=None):
        # Omitted arguments fall back to the wire-format defaults.
        self.xid = xid
        self.flags = flags if flags is not None else 0
        self.entries = entries if entries is not None else []

    def pack(self):
        """Serialize to a wire-format string; the length slot is patched last."""
        parts = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder at index 2
            struct.pack("!L", self.xid),
            struct.pack("!H", self.stats_type),
            struct.pack("!H", self.flags),
            '\x00' * 4,  # 4 bytes of padding after flags
            struct.pack("!L", self.experimenter),
            struct.pack("!L", self.subtype),
            loxi.generic_util.pack_list(self.entries),
        ]
        parts[2] = struct.pack("!H", sum(len(p) for p in parts))
        return ''.join(parts)

    @staticmethod
    def unpack(reader):
        """Decode one message from *reader*, checking the fixed header fields."""
        msg = bsn_flow_checksum_bucket_stats_reply()
        wire_version = reader.read("!B")[0]
        assert(wire_version == 4)
        wire_type = reader.read("!B")[0]
        assert(wire_type == 19)
        msg_len = reader.read("!H")[0]
        reader = reader.slice(msg_len - (2 + 2))
        msg.xid = reader.read("!L")[0]
        wire_stats_type = reader.read("!H")[0]
        assert(wire_stats_type == 65535)
        msg.flags = reader.read("!H")[0]
        reader.skip(4)  # padding
        wire_experimenter = reader.read("!L")[0]
        assert(wire_experimenter == 6035143)
        wire_subtype = reader.read("!L")[0]
        assert(wire_subtype == 10)
        msg.entries = loxi.generic_util.unpack_list(reader, common.bsn_flow_checksum_bucket_stats_entry.unpack)
        return msg

    def __eq__(self, other):
        return (type(self) == type(other)
                and self.xid == other.xid
                and self.flags == other.flags
                and self.entries == other.entries)

    def pretty_print(self, q):
        q.text("bsn_flow_checksum_bucket_stats_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid != None else 'None')
                q.text(",")
                q.breakable()
                q.text("flags = ")
                q.text("%#x" % self.flags)
                q.text(",")
                q.breakable()
                q.text("entries = ")
                q.pp(self.entries)
            q.breakable()
        q.text('}')

bsn_stats_reply.subtypes[10] = bsn_flow_checksum_bucket_stats_reply
class experimenter_stats_request(stats_request):
    """Generated class for an OpenFlow experimenter stats request.

    stats_type 65535 marks the experimenter stats family; concrete
    experimenter messages register in ``subtypes`` keyed by their
    32-bit experimenter id.
    """
    subtypes = {}  # experimenter id -> concrete subclass
    version = 4
    type = 18
    stats_type = 65535
    def __init__(self, xid=None, flags=None, experimenter=None, subtype=None):
        """All fields optional; numeric fields default to 0, xid to None."""
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if flags != None:
            self.flags = flags
        else:
            self.flags = 0
        if experimenter != None:
            self.experimenter = experimenter
        else:
            self.experimenter = 0
        if subtype != None:
            self.subtype = subtype
        else:
            self.subtype = 0
        return
    def pack(self):
        """Serialize to a wire-format string; the length field is patched in last."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.stats_type))
        packed.append(struct.pack("!H", self.flags))
        packed.append('\x00' * 4)
        packed.append(struct.pack("!L", self.experimenter))
        packed.append(struct.pack("!L", self.subtype))
        length = sum([len(x) for x in packed])
        # Overwrite the placeholder with the real total length.
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Parse from *reader*, dispatching to a registered subclass when the
        experimenter id (peeked at byte offset 16, without consuming) is known."""
        subtype, = reader.peek('!L', 16)
        subclass = experimenter_stats_request.subtypes.get(subtype)
        if subclass:
            return subclass.unpack(reader)
        obj = experimenter_stats_request()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 18)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Constrain reads to this message: length minus the 4 bytes already consumed.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _stats_type = reader.read("!H")[0]
        assert(_stats_type == 65535)
        obj.flags = reader.read("!H")[0]
        reader.skip(4)
        obj.experimenter = reader.read("!L")[0]
        obj.subtype = reader.read("!L")[0]
        return obj
    def __eq__(self, other):
        """Equality: same concrete class and equal message fields."""
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.flags != other.flags: return False
        if self.experimenter != other.experimenter: return False
        if self.subtype != other.subtype: return False
        return True
    def pretty_print(self, q):
        """Render this message human-readably via printer object *q*."""
        q.text("experimenter_stats_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("flags = ");
                q.text("%#x" % self.flags)
                q.text(","); q.breakable()
                q.text("subtype = ");
                q.text("%#x" % self.subtype)
                q.breakable()
                q.text('}')
stats_request.subtypes[65535] = experimenter_stats_request  # register under the experimenter stats_type
class bsn_stats_request(experimenter_stats_request):
    """Base class for Big Switch (experimenter id 6035143, i.e. 0x5c16c7)
    stats requests; concrete messages register in ``subtypes`` keyed by
    the BSN subtype."""
    subtypes = {}  # BSN subtype -> concrete subclass
    version = 4
    type = 18
    stats_type = 65535
    experimenter = 6035143
    def __init__(self, xid=None, flags=None, subtype=None):
        """All fields optional; numeric fields default to 0, xid to None."""
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if flags != None:
            self.flags = flags
        else:
            self.flags = 0
        if subtype != None:
            self.subtype = subtype
        else:
            self.subtype = 0
        return
    def pack(self):
        """Serialize to a wire-format string; the length field is patched in last."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.stats_type))
        packed.append(struct.pack("!H", self.flags))
        packed.append('\x00' * 4)
        packed.append(struct.pack("!L", self.experimenter))
        packed.append(struct.pack("!L", self.subtype))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Parse from *reader*, dispatching to a registered subclass when the
        BSN subtype (peeked at byte offset 20, without consuming) is known."""
        subtype, = reader.peek('!L', 20)
        subclass = bsn_stats_request.subtypes.get(subtype)
        if subclass:
            return subclass.unpack(reader)
        obj = bsn_stats_request()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 18)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Constrain reads to this message: length minus the 4 bytes already consumed.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _stats_type = reader.read("!H")[0]
        assert(_stats_type == 65535)
        obj.flags = reader.read("!H")[0]
        reader.skip(4)
        _experimenter = reader.read("!L")[0]
        assert(_experimenter == 6035143)
        obj.subtype = reader.read("!L")[0]
        return obj
    def __eq__(self, other):
        """Equality: same concrete class and equal message fields."""
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.flags != other.flags: return False
        if self.subtype != other.subtype: return False
        return True
    def pretty_print(self, q):
        """Render this message human-readably via printer object *q*."""
        q.text("bsn_stats_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("flags = ");
                q.text("%#x" % self.flags)
                q.breakable()
                q.text('}')
experimenter_stats_request.subtypes[6035143] = bsn_stats_request  # register under the BSN experimenter id
class bsn_flow_checksum_bucket_stats_request(bsn_stats_request):
    """BSN flow-checksum bucket stats request (subtype 10) for one table."""
    version = 4
    type = 18
    stats_type = 65535
    experimenter = 6035143
    subtype = 10
    def __init__(self, xid=None, flags=None, table_id=None):
        """All fields optional; numeric fields default to 0, xid to None."""
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if flags != None:
            self.flags = flags
        else:
            self.flags = 0
        if table_id != None:
            self.table_id = table_id
        else:
            self.table_id = 0
        return
    def pack(self):
        """Serialize to a wire-format string; the length field is patched in last."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.stats_type))
        packed.append(struct.pack("!H", self.flags))
        packed.append('\x00' * 4)
        packed.append(struct.pack("!L", self.experimenter))
        packed.append(struct.pack("!L", self.subtype))
        packed.append(struct.pack("!B", self.table_id))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Parse from *reader*; asserts this class's fixed wire constants."""
        obj = bsn_flow_checksum_bucket_stats_request()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 18)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Constrain reads to this message: length minus the 4 bytes already consumed.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _stats_type = reader.read("!H")[0]
        assert(_stats_type == 65535)
        obj.flags = reader.read("!H")[0]
        reader.skip(4)
        _experimenter = reader.read("!L")[0]
        assert(_experimenter == 6035143)
        _subtype = reader.read("!L")[0]
        assert(_subtype == 10)
        obj.table_id = reader.read("!B")[0]
        return obj
    def __eq__(self, other):
        """Equality: same concrete class and equal message fields."""
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.flags != other.flags: return False
        if self.table_id != other.table_id: return False
        return True
    def pretty_print(self, q):
        """Render this message human-readably via printer object *q*."""
        q.text("bsn_flow_checksum_bucket_stats_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("flags = ");
                q.text("%#x" % self.flags)
                q.text(","); q.breakable()
                q.text("table_id = ");
                q.text("%#x" % self.table_id)
                q.breakable()
                q.text('}')
bsn_stats_request.subtypes[10] = bsn_flow_checksum_bucket_stats_request  # register for dispatch by BSN subtype
class bsn_flow_idle(bsn_header):
    """BSN flow-idle notification (subtype 46 family message, subtype 40):
    carries the idle flow's cookie, priority, table id and match."""
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 40
    def __init__(self, xid=None, cookie=None, priority=None, table_id=None, match=None):
        """All fields optional; numeric fields default to 0, xid to None,
        match to an empty common.match()."""
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if cookie != None:
            self.cookie = cookie
        else:
            self.cookie = 0
        if priority != None:
            self.priority = priority
        else:
            self.priority = 0
        if table_id != None:
            self.table_id = table_id
        else:
            self.table_id = 0
        if match != None:
            self.match = match
        else:
            self.match = common.match()
        return
    def pack(self):
        """Serialize to a wire-format string; the length field is patched in last."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!L", self.experimenter))
        packed.append(struct.pack("!L", self.subtype))
        packed.append(struct.pack("!Q", self.cookie))
        packed.append(struct.pack("!H", self.priority))
        packed.append(struct.pack("!B", self.table_id))
        packed.append('\x00' * 5)
        packed.append(self.match.pack())
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Parse from *reader*; asserts this class's fixed wire constants."""
        obj = bsn_flow_idle()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 4)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Constrain reads to this message: length minus the 4 bytes already consumed.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _experimenter = reader.read("!L")[0]
        assert(_experimenter == 6035143)
        _subtype = reader.read("!L")[0]
        assert(_subtype == 40)
        obj.cookie = reader.read("!Q")[0]
        obj.priority = reader.read("!H")[0]
        obj.table_id = reader.read("!B")[0]
        reader.skip(5)
        obj.match = common.match.unpack(reader)
        return obj
    def __eq__(self, other):
        """Equality: same concrete class and equal message fields."""
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.cookie != other.cookie: return False
        if self.priority != other.priority: return False
        if self.table_id != other.table_id: return False
        if self.match != other.match: return False
        return True
    def pretty_print(self, q):
        """Render this message human-readably via printer object *q*."""
        q.text("bsn_flow_idle {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("cookie = ");
                q.text("%#x" % self.cookie)
                q.text(","); q.breakable()
                q.text("priority = ");
                q.text("%#x" % self.priority)
                q.text(","); q.breakable()
                q.text("table_id = ");
                q.text("%#x" % self.table_id)
                q.text(","); q.breakable()
                q.text("match = ");
                q.pp(self.match)
                q.breakable()
                q.text('}')
bsn_header.subtypes[40] = bsn_flow_idle  # register for dispatch by BSN subtype
class bsn_flow_idle_enable_get_reply(bsn_header):
    """BSN flow-idle-enable get reply (subtype 39) carrying the enabled flag."""
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 39
    def __init__(self, xid=None, enabled=None):
        """All fields optional; enabled defaults to 0, xid to None."""
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if enabled != None:
            self.enabled = enabled
        else:
            self.enabled = 0
        return
    def pack(self):
        """Serialize to a wire-format string; the length field is patched in last."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!L", self.experimenter))
        packed.append(struct.pack("!L", self.subtype))
        packed.append(struct.pack("!L", self.enabled))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Parse from *reader*; asserts this class's fixed wire constants."""
        obj = bsn_flow_idle_enable_get_reply()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 4)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Constrain reads to this message: length minus the 4 bytes already consumed.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _experimenter = reader.read("!L")[0]
        assert(_experimenter == 6035143)
        _subtype = reader.read("!L")[0]
        assert(_subtype == 39)
        obj.enabled = reader.read("!L")[0]
        return obj
    def __eq__(self, other):
        """Equality: same concrete class and equal message fields."""
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.enabled != other.enabled: return False
        return True
    def pretty_print(self, q):
        """Render this message human-readably via printer object *q*."""
        q.text("bsn_flow_idle_enable_get_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("enabled = ");
                q.text("%#x" % self.enabled)
                q.breakable()
                q.text('}')
bsn_header.subtypes[39] = bsn_flow_idle_enable_get_reply  # register for dispatch by BSN subtype
class bsn_flow_idle_enable_get_request(bsn_header):
    """BSN flow-idle-enable get request (subtype 38); header-only payload."""
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 38
    def __init__(self, xid=None):
        """xid optional; defaults to None."""
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        return
    def pack(self):
        """Serialize to a wire-format string; the length field is patched in last."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!L", self.experimenter))
        packed.append(struct.pack("!L", self.subtype))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Parse from *reader*; asserts this class's fixed wire constants."""
        obj = bsn_flow_idle_enable_get_request()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 4)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Constrain reads to this message: length minus the 4 bytes already consumed.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _experimenter = reader.read("!L")[0]
        assert(_experimenter == 6035143)
        _subtype = reader.read("!L")[0]
        assert(_subtype == 38)
        return obj
    def __eq__(self, other):
        """Equality: same concrete class and equal xid."""
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        return True
    def pretty_print(self, q):
        """Render this message human-readably via printer object *q*."""
        q.text("bsn_flow_idle_enable_get_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.breakable()
                q.text('}')
bsn_header.subtypes[38] = bsn_flow_idle_enable_get_request  # register for dispatch by BSN subtype
class bsn_flow_idle_enable_set_reply(bsn_header):
    """BSN flow-idle-enable set reply (subtype 37) with enable and status fields."""
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 37
    def __init__(self, xid=None, enable=None, status=None):
        """All fields optional; numeric fields default to 0, xid to None."""
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if enable != None:
            self.enable = enable
        else:
            self.enable = 0
        if status != None:
            self.status = status
        else:
            self.status = 0
        return
    def pack(self):
        """Serialize to a wire-format string; the length field is patched in last."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!L", self.experimenter))
        packed.append(struct.pack("!L", self.subtype))
        packed.append(struct.pack("!L", self.enable))
        packed.append(struct.pack("!L", self.status))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Parse from *reader*; asserts this class's fixed wire constants."""
        obj = bsn_flow_idle_enable_set_reply()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 4)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Constrain reads to this message: length minus the 4 bytes already consumed.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _experimenter = reader.read("!L")[0]
        assert(_experimenter == 6035143)
        _subtype = reader.read("!L")[0]
        assert(_subtype == 37)
        obj.enable = reader.read("!L")[0]
        obj.status = reader.read("!L")[0]
        return obj
    def __eq__(self, other):
        """Equality: same concrete class and equal message fields."""
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.enable != other.enable: return False
        if self.status != other.status: return False
        return True
    def pretty_print(self, q):
        """Render this message human-readably via printer object *q*."""
        q.text("bsn_flow_idle_enable_set_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("enable = ");
                q.text("%#x" % self.enable)
                q.text(","); q.breakable()
                q.text("status = ");
                q.text("%#x" % self.status)
                q.breakable()
                q.text('}')
bsn_header.subtypes[37] = bsn_flow_idle_enable_set_reply  # register for dispatch by BSN subtype
class bsn_flow_idle_enable_set_request(bsn_header):
    """BSN flow-idle-enable set request (subtype 36) carrying the enable flag."""
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 36
    def __init__(self, xid=None, enable=None):
        """All fields optional; enable defaults to 0, xid to None."""
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if enable != None:
            self.enable = enable
        else:
            self.enable = 0
        return
    def pack(self):
        """Serialize to a wire-format string; the length field is patched in last."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!L", self.experimenter))
        packed.append(struct.pack("!L", self.subtype))
        packed.append(struct.pack("!L", self.enable))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Parse from *reader*; asserts this class's fixed wire constants."""
        obj = bsn_flow_idle_enable_set_request()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 4)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Constrain reads to this message: length minus the 4 bytes already consumed.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _experimenter = reader.read("!L")[0]
        assert(_experimenter == 6035143)
        _subtype = reader.read("!L")[0]
        assert(_subtype == 36)
        obj.enable = reader.read("!L")[0]
        return obj
    def __eq__(self, other):
        """Equality: same concrete class and equal message fields."""
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.enable != other.enable: return False
        return True
    def pretty_print(self, q):
        """Render this message human-readably via printer object *q*."""
        q.text("bsn_flow_idle_enable_set_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("enable = ");
                q.text("%#x" % self.enable)
                q.breakable()
                q.text('}')
bsn_header.subtypes[36] = bsn_flow_idle_enable_set_request  # register for dispatch by BSN subtype
class bsn_gentable_bucket_stats_reply(bsn_stats_reply):
    """BSN gentable bucket stats reply (subtype 5) carrying a list of entries."""
    version = 4
    type = 19
    stats_type = 65535
    experimenter = 6035143
    subtype = 5
    def __init__(self, xid=None, flags=None, entries=None):
        """All fields optional; flags defaults to 0, entries to [], xid to None."""
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if flags != None:
            self.flags = flags
        else:
            self.flags = 0
        if entries != None:
            self.entries = entries
        else:
            self.entries = []
        return
    def pack(self):
        """Serialize to a wire-format string; the length field is patched in last."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.stats_type))
        packed.append(struct.pack("!H", self.flags))
        packed.append('\x00' * 4)
        packed.append(struct.pack("!L", self.experimenter))
        packed.append(struct.pack("!L", self.subtype))
        packed.append(loxi.generic_util.pack_list(self.entries))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Parse from *reader*; asserts this class's fixed wire constants."""
        obj = bsn_gentable_bucket_stats_reply()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 19)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Constrain reads to this message: length minus the 4 bytes already consumed.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _stats_type = reader.read("!H")[0]
        assert(_stats_type == 65535)
        obj.flags = reader.read("!H")[0]
        reader.skip(4)
        _experimenter = reader.read("!L")[0]
        assert(_experimenter == 6035143)
        _subtype = reader.read("!L")[0]
        assert(_subtype == 5)
        # The rest of the payload is a list of bucket stats entries.
        obj.entries = loxi.generic_util.unpack_list(reader, common.bsn_gentable_bucket_stats_entry.unpack)
        return obj
    def __eq__(self, other):
        """Equality: same concrete class and equal message fields."""
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.flags != other.flags: return False
        if self.entries != other.entries: return False
        return True
    def pretty_print(self, q):
        """Render this message human-readably via printer object *q*."""
        q.text("bsn_gentable_bucket_stats_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("flags = ");
                q.text("%#x" % self.flags)
                q.text(","); q.breakable()
                q.text("entries = ");
                q.pp(self.entries)
                q.breakable()
                q.text('}')
bsn_stats_reply.subtypes[5] = bsn_gentable_bucket_stats_reply  # register for dispatch by BSN subtype
class bsn_gentable_bucket_stats_request(bsn_stats_request):
    """BSN gentable bucket stats request (subtype 5) for one gentable (16-bit table_id)."""
    version = 4
    type = 18
    stats_type = 65535
    experimenter = 6035143
    subtype = 5
    def __init__(self, xid=None, flags=None, table_id=None):
        """All fields optional; numeric fields default to 0, xid to None."""
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if flags != None:
            self.flags = flags
        else:
            self.flags = 0
        if table_id != None:
            self.table_id = table_id
        else:
            self.table_id = 0
        return
    def pack(self):
        """Serialize to a wire-format string; the length field is patched in last."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.stats_type))
        packed.append(struct.pack("!H", self.flags))
        packed.append('\x00' * 4)
        packed.append(struct.pack("!L", self.experimenter))
        packed.append(struct.pack("!L", self.subtype))
        packed.append(struct.pack("!H", self.table_id))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Parse from *reader*; asserts this class's fixed wire constants."""
        obj = bsn_gentable_bucket_stats_request()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 18)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Constrain reads to this message: length minus the 4 bytes already consumed.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _stats_type = reader.read("!H")[0]
        assert(_stats_type == 65535)
        obj.flags = reader.read("!H")[0]
        reader.skip(4)
        _experimenter = reader.read("!L")[0]
        assert(_experimenter == 6035143)
        _subtype = reader.read("!L")[0]
        assert(_subtype == 5)
        obj.table_id = reader.read("!H")[0]
        return obj
    def __eq__(self, other):
        """Equality: same concrete class and equal message fields."""
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.flags != other.flags: return False
        if self.table_id != other.table_id: return False
        return True
    def pretty_print(self, q):
        """Render this message human-readably via printer object *q*."""
        q.text("bsn_gentable_bucket_stats_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("flags = ");
                q.text("%#x" % self.flags)
                q.text(","); q.breakable()
                q.text("table_id = ");
                q.text("%#x" % self.table_id)
                q.breakable()
                q.text('}')
bsn_stats_request.subtypes[5] = bsn_gentable_bucket_stats_request  # register for dispatch by BSN subtype
class bsn_gentable_clear_reply(bsn_header):
    """BSN gentable clear reply (subtype 49) reporting deleted and error counts."""
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 49
    def __init__(self, xid=None, table_id=None, deleted_count=None, error_count=None):
        """All fields optional; numeric fields default to 0, xid to None."""
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if table_id != None:
            self.table_id = table_id
        else:
            self.table_id = 0
        if deleted_count != None:
            self.deleted_count = deleted_count
        else:
            self.deleted_count = 0
        if error_count != None:
            self.error_count = error_count
        else:
            self.error_count = 0
        return
    def pack(self):
        """Serialize to a wire-format string; the length field is patched in last."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!L", self.experimenter))
        packed.append(struct.pack("!L", self.subtype))
        packed.append(struct.pack("!H", self.table_id))
        packed.append('\x00' * 2)
        packed.append(struct.pack("!L", self.deleted_count))
        packed.append(struct.pack("!L", self.error_count))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Parse from *reader*; asserts this class's fixed wire constants."""
        obj = bsn_gentable_clear_reply()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 4)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Constrain reads to this message: length minus the 4 bytes already consumed.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _experimenter = reader.read("!L")[0]
        assert(_experimenter == 6035143)
        _subtype = reader.read("!L")[0]
        assert(_subtype == 49)
        obj.table_id = reader.read("!H")[0]
        reader.skip(2)
        obj.deleted_count = reader.read("!L")[0]
        obj.error_count = reader.read("!L")[0]
        return obj
    def __eq__(self, other):
        """Equality: same concrete class and equal message fields."""
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.table_id != other.table_id: return False
        if self.deleted_count != other.deleted_count: return False
        if self.error_count != other.error_count: return False
        return True
    def pretty_print(self, q):
        """Render this message human-readably via printer object *q*."""
        q.text("bsn_gentable_clear_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("table_id = ");
                q.text("%#x" % self.table_id)
                q.text(","); q.breakable()
                q.text("deleted_count = ");
                q.text("%#x" % self.deleted_count)
                q.text(","); q.breakable()
                q.text("error_count = ");
                q.text("%#x" % self.error_count)
                q.breakable()
                q.text('}')
bsn_header.subtypes[49] = bsn_gentable_clear_reply  # register for dispatch by BSN subtype
class bsn_gentable_clear_request(bsn_header):
    """BSN gentable clear request (subtype 48); selects entries to clear by
    128-bit checksum and checksum mask."""
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 48
    def __init__(self, xid=None, table_id=None, checksum=None, checksum_mask=None):
        """All fields optional; numeric fields default to 0, xid to None."""
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if table_id != None:
            self.table_id = table_id
        else:
            self.table_id = 0
        if checksum != None:
            self.checksum = checksum
        else:
            self.checksum = 0
        if checksum_mask != None:
            self.checksum_mask = checksum_mask
        else:
            self.checksum_mask = 0
        return
    def pack(self):
        """Serialize to a wire-format string; the length field is patched in last."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!L", self.experimenter))
        packed.append(struct.pack("!L", self.subtype))
        packed.append(struct.pack("!H", self.table_id))
        packed.append('\x00' * 2)
        packed.append(util.pack_checksum_128(self.checksum))
        packed.append(util.pack_checksum_128(self.checksum_mask))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Parse from *reader*; asserts this class's fixed wire constants."""
        obj = bsn_gentable_clear_request()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 4)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Constrain reads to this message: length minus the 4 bytes already consumed.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _experimenter = reader.read("!L")[0]
        assert(_experimenter == 6035143)
        _subtype = reader.read("!L")[0]
        assert(_subtype == 48)
        obj.table_id = reader.read("!H")[0]
        reader.skip(2)
        obj.checksum = util.unpack_checksum_128(reader)
        obj.checksum_mask = util.unpack_checksum_128(reader)
        return obj
    def __eq__(self, other):
        """Equality: same concrete class and equal message fields."""
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.table_id != other.table_id: return False
        if self.checksum != other.checksum: return False
        if self.checksum_mask != other.checksum_mask: return False
        return True
    def pretty_print(self, q):
        """Render this message human-readably via printer object *q*."""
        q.text("bsn_gentable_clear_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("table_id = ");
                q.text("%#x" % self.table_id)
                q.text(","); q.breakable()
                q.text("checksum = ");
                q.pp(self.checksum)
                q.text(","); q.breakable()
                q.text("checksum_mask = ");
                q.pp(self.checksum_mask)
                q.breakable()
                q.text('}')
bsn_header.subtypes[48] = bsn_gentable_clear_request  # register for dispatch by BSN subtype
class bsn_gentable_desc_stats_reply(bsn_stats_reply):
    """BSN gentable description stats reply (subtype 4) carrying a list of entries."""
    version = 4
    type = 19
    stats_type = 65535
    experimenter = 6035143
    subtype = 4
    def __init__(self, xid=None, flags=None, entries=None):
        """All fields optional; flags defaults to 0, entries to [], xid to None."""
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if flags != None:
            self.flags = flags
        else:
            self.flags = 0
        if entries != None:
            self.entries = entries
        else:
            self.entries = []
        return
    def pack(self):
        """Serialize to a wire-format string; the length field is patched in last."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.stats_type))
        packed.append(struct.pack("!H", self.flags))
        packed.append('\x00' * 4)
        packed.append(struct.pack("!L", self.experimenter))
        packed.append(struct.pack("!L", self.subtype))
        packed.append(loxi.generic_util.pack_list(self.entries))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Parse from *reader*; asserts this class's fixed wire constants."""
        obj = bsn_gentable_desc_stats_reply()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 19)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Constrain reads to this message: length minus the 4 bytes already consumed.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _stats_type = reader.read("!H")[0]
        assert(_stats_type == 65535)
        obj.flags = reader.read("!H")[0]
        reader.skip(4)
        _experimenter = reader.read("!L")[0]
        assert(_experimenter == 6035143)
        _subtype = reader.read("!L")[0]
        assert(_subtype == 4)
        # The rest of the payload is a list of desc stats entries.
        obj.entries = loxi.generic_util.unpack_list(reader, common.bsn_gentable_desc_stats_entry.unpack)
        return obj
    def __eq__(self, other):
        """Equality: same concrete class and equal message fields."""
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.flags != other.flags: return False
        if self.entries != other.entries: return False
        return True
    def pretty_print(self, q):
        """Render this message human-readably via printer object *q*."""
        q.text("bsn_gentable_desc_stats_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("flags = ");
                q.text("%#x" % self.flags)
                q.text(","); q.breakable()
                q.text("entries = ");
                q.pp(self.entries)
                q.breakable()
                q.text('}')
bsn_stats_reply.subtypes[4] = bsn_gentable_desc_stats_reply  # register for dispatch by BSN subtype
class bsn_gentable_desc_stats_request(bsn_stats_request):
    """BSN gentable description stats request (subtype 4); no extra payload."""
    version = 4
    type = 18
    stats_type = 65535
    experimenter = 6035143
    subtype = 4
    def __init__(self, xid=None, flags=None):
        """All fields optional; flags defaults to 0, xid to None."""
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if flags != None:
            self.flags = flags
        else:
            self.flags = 0
        return
    def pack(self):
        """Serialize to a wire-format string; the length field is patched in last."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.stats_type))
        packed.append(struct.pack("!H", self.flags))
        packed.append('\x00' * 4)
        packed.append(struct.pack("!L", self.experimenter))
        packed.append(struct.pack("!L", self.subtype))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Parse from *reader*; asserts this class's fixed wire constants."""
        obj = bsn_gentable_desc_stats_request()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 18)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Constrain reads to this message: length minus the 4 bytes already consumed.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _stats_type = reader.read("!H")[0]
        assert(_stats_type == 65535)
        obj.flags = reader.read("!H")[0]
        reader.skip(4)
        _experimenter = reader.read("!L")[0]
        assert(_experimenter == 6035143)
        _subtype = reader.read("!L")[0]
        assert(_subtype == 4)
        return obj
    def __eq__(self, other):
        """Equality: same concrete class and equal message fields."""
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.flags != other.flags: return False
        return True
    def pretty_print(self, q):
        """Render this message human-readably via printer object *q*."""
        q.text("bsn_gentable_desc_stats_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("flags = ");
                q.text("%#x" % self.flags)
                q.breakable()
                q.text('}')
bsn_stats_request.subtypes[4] = bsn_gentable_desc_stats_request  # register for dispatch by BSN subtype
class bsn_gentable_entry_add(bsn_header):
    """BSN gentable entry add (subtype 46): table id, 128-bit checksum,
    a length-prefixed TLV key list, and a TLV value list."""
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 46
    def __init__(self, xid=None, table_id=None, checksum=None, key=None, value=None):
        """All fields optional; numeric fields default to 0, key/value to [],
        xid to None."""
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if table_id != None:
            self.table_id = table_id
        else:
            self.table_id = 0
        if checksum != None:
            self.checksum = checksum
        else:
            self.checksum = 0
        if key != None:
            self.key = key
        else:
            self.key = []
        if value != None:
            self.value = value
        else:
            self.value = []
        return
    def pack(self):
        """Serialize to a wire-format string; both the message length and the
        key-list length placeholders are patched in after packing."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!L", self.experimenter))
        packed.append(struct.pack("!L", self.subtype))
        packed.append(struct.pack("!H", self.table_id))
        packed.append(struct.pack("!H", 0)) # placeholder for key_length at index 7
        packed.append(util.pack_checksum_128(self.checksum))
        packed.append(loxi.generic_util.pack_list(self.key))
        # Patch the real byte length of the just-packed key list.
        packed[7] = struct.pack("!H", len(packed[-1]))
        packed.append(loxi.generic_util.pack_list(self.value))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Parse from *reader*; asserts this class's fixed wire constants.

        The key list is bounded by the on-wire key_length field; the value
        list consumes the remainder of the message.
        """
        obj = bsn_gentable_entry_add()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 4)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Constrain reads to this message: length minus the 4 bytes already consumed.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _experimenter = reader.read("!L")[0]
        assert(_experimenter == 6035143)
        _subtype = reader.read("!L")[0]
        assert(_subtype == 46)
        obj.table_id = reader.read("!H")[0]
        _key_length = reader.read("!H")[0]
        obj.checksum = util.unpack_checksum_128(reader)
        obj.key = loxi.generic_util.unpack_list(reader.slice(_key_length), bsn_tlv.bsn_tlv.unpack)
        obj.value = loxi.generic_util.unpack_list(reader, bsn_tlv.bsn_tlv.unpack)
        return obj
    def __eq__(self, other):
        """Equality: same concrete class and equal message fields."""
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.table_id != other.table_id: return False
        if self.checksum != other.checksum: return False
        if self.key != other.key: return False
        if self.value != other.value: return False
        return True
    def pretty_print(self, q):
        """Render this message human-readably via printer object *q*."""
        q.text("bsn_gentable_entry_add {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("table_id = ");
                q.text("%#x" % self.table_id)
                q.text(","); q.breakable()
                q.text("checksum = ");
                q.pp(self.checksum)
                q.text(","); q.breakable()
                q.text("key = ");
                q.pp(self.key)
                q.text(","); q.breakable()
                q.text("value = ");
                q.pp(self.value)
                q.breakable()
                q.text('}')
bsn_header.subtypes[46] = bsn_gentable_entry_add  # register for dispatch by BSN subtype
class bsn_gentable_entry_delete(bsn_header):
    """BSN gentable entry-delete message (experimenter subtype 47).

    Removes the entry identified by the TLV list `key` from gentable
    `table_id`.
    """
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 47
    def __init__(self, xid=None, table_id=None, key=None):
        # Use `is not None` (not `!= None`) so falsy-but-valid caller
        # values such as 0 or [] are preserved.
        self.xid = xid
        self.table_id = table_id if table_id is not None else 0
        self.key = key if key is not None else []
    def pack(self):
        """Serialize this message; returns the packed byte string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!L", self.experimenter))
        packed.append(struct.pack("!L", self.subtype))
        packed.append(struct.pack("!H", self.table_id))
        packed.append(loxi.generic_util.pack_list(self.key))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Deserialize from `reader`; returns a new instance."""
        obj = bsn_gentable_entry_delete()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 4)
        _length = reader.read("!H")[0]
        orig_reader = reader
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _experimenter = reader.read("!L")[0]
        assert(_experimenter == 6035143)
        _subtype = reader.read("!L")[0]
        assert(_subtype == 47)
        obj.table_id = reader.read("!H")[0]
        obj.key = loxi.generic_util.unpack_list(reader, bsn_tlv.bsn_tlv.unpack)
        return obj
    def __eq__(self, other):
        return (type(self) == type(other) and
                self.xid == other.xid and
                self.table_id == other.table_id and
                self.key == other.key)
    def pretty_print(self, q):
        """Write a human-readable rendering to pretty-printer `q`."""
        q.text("bsn_gentable_entry_delete {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("table_id = ");
                q.text("%#x" % self.table_id)
                q.text(","); q.breakable()
                q.text("key = ");
                q.pp(self.key)
                q.breakable()
        q.text('}')
bsn_header.subtypes[47] = bsn_gentable_entry_delete
class bsn_gentable_entry_desc_stats_reply(bsn_stats_reply):
    """BSN gentable entry-description stats reply (stats subtype 2).

    Carries a list of gentable entry-description stats `entries`.
    """
    version = 4
    type = 19
    stats_type = 65535
    experimenter = 6035143
    subtype = 2
    def __init__(self, xid=None, flags=None, entries=None):
        # Use `is not None` (not `!= None`) so falsy-but-valid caller
        # values such as 0 or [] are preserved.
        self.xid = xid
        self.flags = flags if flags is not None else 0
        self.entries = entries if entries is not None else []
    def pack(self):
        """Serialize this message; returns the packed byte string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.stats_type))
        packed.append(struct.pack("!H", self.flags))
        packed.append('\x00' * 4)
        packed.append(struct.pack("!L", self.experimenter))
        packed.append(struct.pack("!L", self.subtype))
        packed.append(loxi.generic_util.pack_list(self.entries))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Deserialize from `reader`; returns a new instance."""
        obj = bsn_gentable_entry_desc_stats_reply()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 19)
        _length = reader.read("!H")[0]
        orig_reader = reader
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _stats_type = reader.read("!H")[0]
        assert(_stats_type == 65535)
        obj.flags = reader.read("!H")[0]
        reader.skip(4)
        _experimenter = reader.read("!L")[0]
        assert(_experimenter == 6035143)
        _subtype = reader.read("!L")[0]
        assert(_subtype == 2)
        obj.entries = loxi.generic_util.unpack_list(reader, common.bsn_gentable_entry_desc_stats_entry.unpack)
        return obj
    def __eq__(self, other):
        return (type(self) == type(other) and
                self.xid == other.xid and
                self.flags == other.flags and
                self.entries == other.entries)
    def pretty_print(self, q):
        """Write a human-readable rendering to pretty-printer `q`."""
        q.text("bsn_gentable_entry_desc_stats_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("flags = ");
                q.text("%#x" % self.flags)
                q.text(","); q.breakable()
                q.text("entries = ");
                q.pp(self.entries)
                q.breakable()
        q.text('}')
bsn_stats_reply.subtypes[2] = bsn_gentable_entry_desc_stats_reply
class bsn_gentable_entry_desc_stats_request(bsn_stats_request):
    """BSN gentable entry-description stats request (stats subtype 2).

    Requests entry descriptions for gentable `table_id`, filtered by
    128-bit `checksum` under `checksum_mask`.
    """
    version = 4
    type = 18
    stats_type = 65535
    experimenter = 6035143
    subtype = 2
    def __init__(self, xid=None, flags=None, table_id=None, checksum=None, checksum_mask=None):
        # Use `is not None` (not `!= None`) so falsy-but-valid caller
        # values such as 0 are preserved.
        self.xid = xid
        self.flags = flags if flags is not None else 0
        self.table_id = table_id if table_id is not None else 0
        self.checksum = checksum if checksum is not None else 0
        self.checksum_mask = checksum_mask if checksum_mask is not None else 0
    def pack(self):
        """Serialize this message; returns the packed byte string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.stats_type))
        packed.append(struct.pack("!H", self.flags))
        packed.append('\x00' * 4)
        packed.append(struct.pack("!L", self.experimenter))
        packed.append(struct.pack("!L", self.subtype))
        packed.append(struct.pack("!H", self.table_id))
        packed.append('\x00' * 2)
        packed.append(util.pack_checksum_128(self.checksum))
        packed.append(util.pack_checksum_128(self.checksum_mask))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Deserialize from `reader`; returns a new instance."""
        obj = bsn_gentable_entry_desc_stats_request()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 18)
        _length = reader.read("!H")[0]
        orig_reader = reader
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _stats_type = reader.read("!H")[0]
        assert(_stats_type == 65535)
        obj.flags = reader.read("!H")[0]
        reader.skip(4)
        _experimenter = reader.read("!L")[0]
        assert(_experimenter == 6035143)
        _subtype = reader.read("!L")[0]
        assert(_subtype == 2)
        obj.table_id = reader.read("!H")[0]
        reader.skip(2)
        obj.checksum = util.unpack_checksum_128(reader)
        obj.checksum_mask = util.unpack_checksum_128(reader)
        return obj
    def __eq__(self, other):
        return (type(self) == type(other) and
                self.xid == other.xid and
                self.flags == other.flags and
                self.table_id == other.table_id and
                self.checksum == other.checksum and
                self.checksum_mask == other.checksum_mask)
    def pretty_print(self, q):
        """Write a human-readable rendering to pretty-printer `q`."""
        q.text("bsn_gentable_entry_desc_stats_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("flags = ");
                q.text("%#x" % self.flags)
                q.text(","); q.breakable()
                q.text("table_id = ");
                q.text("%#x" % self.table_id)
                q.text(","); q.breakable()
                q.text("checksum = ");
                q.pp(self.checksum)
                q.text(","); q.breakable()
                q.text("checksum_mask = ");
                q.pp(self.checksum_mask)
                q.breakable()
        q.text('}')
bsn_stats_request.subtypes[2] = bsn_gentable_entry_desc_stats_request
class bsn_gentable_entry_stats_reply(bsn_stats_reply):
    """BSN gentable entry stats reply (stats subtype 3).

    Carries a list of gentable entry stats `entries`.
    """
    version = 4
    type = 19
    stats_type = 65535
    experimenter = 6035143
    subtype = 3
    def __init__(self, xid=None, flags=None, entries=None):
        # Use `is not None` (not `!= None`) so falsy-but-valid caller
        # values such as 0 or [] are preserved.
        self.xid = xid
        self.flags = flags if flags is not None else 0
        self.entries = entries if entries is not None else []
    def pack(self):
        """Serialize this message; returns the packed byte string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.stats_type))
        packed.append(struct.pack("!H", self.flags))
        packed.append('\x00' * 4)
        packed.append(struct.pack("!L", self.experimenter))
        packed.append(struct.pack("!L", self.subtype))
        packed.append(loxi.generic_util.pack_list(self.entries))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Deserialize from `reader`; returns a new instance."""
        obj = bsn_gentable_entry_stats_reply()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 19)
        _length = reader.read("!H")[0]
        orig_reader = reader
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _stats_type = reader.read("!H")[0]
        assert(_stats_type == 65535)
        obj.flags = reader.read("!H")[0]
        reader.skip(4)
        _experimenter = reader.read("!L")[0]
        assert(_experimenter == 6035143)
        _subtype = reader.read("!L")[0]
        assert(_subtype == 3)
        obj.entries = loxi.generic_util.unpack_list(reader, common.bsn_gentable_entry_stats_entry.unpack)
        return obj
    def __eq__(self, other):
        return (type(self) == type(other) and
                self.xid == other.xid and
                self.flags == other.flags and
                self.entries == other.entries)
    def pretty_print(self, q):
        """Write a human-readable rendering to pretty-printer `q`."""
        q.text("bsn_gentable_entry_stats_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("flags = ");
                q.text("%#x" % self.flags)
                q.text(","); q.breakable()
                q.text("entries = ");
                q.pp(self.entries)
                q.breakable()
        q.text('}')
bsn_stats_reply.subtypes[3] = bsn_gentable_entry_stats_reply
class bsn_gentable_entry_stats_request(bsn_stats_request):
    """BSN gentable entry stats request (stats subtype 3).

    Requests entry stats for gentable `table_id`, filtered by 128-bit
    `checksum` under `checksum_mask`.
    """
    version = 4
    type = 18
    stats_type = 65535
    experimenter = 6035143
    subtype = 3
    def __init__(self, xid=None, flags=None, table_id=None, checksum=None, checksum_mask=None):
        # Use `is not None` (not `!= None`) so falsy-but-valid caller
        # values such as 0 are preserved.
        self.xid = xid
        self.flags = flags if flags is not None else 0
        self.table_id = table_id if table_id is not None else 0
        self.checksum = checksum if checksum is not None else 0
        self.checksum_mask = checksum_mask if checksum_mask is not None else 0
    def pack(self):
        """Serialize this message; returns the packed byte string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.stats_type))
        packed.append(struct.pack("!H", self.flags))
        packed.append('\x00' * 4)
        packed.append(struct.pack("!L", self.experimenter))
        packed.append(struct.pack("!L", self.subtype))
        packed.append(struct.pack("!H", self.table_id))
        packed.append('\x00' * 2)
        packed.append(util.pack_checksum_128(self.checksum))
        packed.append(util.pack_checksum_128(self.checksum_mask))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Deserialize from `reader`; returns a new instance."""
        obj = bsn_gentable_entry_stats_request()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 18)
        _length = reader.read("!H")[0]
        orig_reader = reader
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _stats_type = reader.read("!H")[0]
        assert(_stats_type == 65535)
        obj.flags = reader.read("!H")[0]
        reader.skip(4)
        _experimenter = reader.read("!L")[0]
        assert(_experimenter == 6035143)
        _subtype = reader.read("!L")[0]
        assert(_subtype == 3)
        obj.table_id = reader.read("!H")[0]
        reader.skip(2)
        obj.checksum = util.unpack_checksum_128(reader)
        obj.checksum_mask = util.unpack_checksum_128(reader)
        return obj
    def __eq__(self, other):
        return (type(self) == type(other) and
                self.xid == other.xid and
                self.flags == other.flags and
                self.table_id == other.table_id and
                self.checksum == other.checksum and
                self.checksum_mask == other.checksum_mask)
    def pretty_print(self, q):
        """Write a human-readable rendering to pretty-printer `q`."""
        q.text("bsn_gentable_entry_stats_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("flags = ");
                q.text("%#x" % self.flags)
                q.text(","); q.breakable()
                q.text("table_id = ");
                q.text("%#x" % self.table_id)
                q.text(","); q.breakable()
                q.text("checksum = ");
                q.pp(self.checksum)
                q.text(","); q.breakable()
                q.text("checksum_mask = ");
                q.pp(self.checksum_mask)
                q.breakable()
        q.text('}')
bsn_stats_request.subtypes[3] = bsn_gentable_entry_stats_request
class bsn_gentable_set_buckets_size(bsn_header):
    """BSN gentable set-buckets-size message (experimenter subtype 50).

    Sets the hash-bucket count `buckets_size` for gentable `table_id`.
    """
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 50
    def __init__(self, xid=None, table_id=None, buckets_size=None):
        # Use `is not None` (not `!= None`) so falsy-but-valid caller
        # values such as 0 are preserved.
        self.xid = xid
        self.table_id = table_id if table_id is not None else 0
        self.buckets_size = buckets_size if buckets_size is not None else 0
    def pack(self):
        """Serialize this message; returns the packed byte string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!L", self.experimenter))
        packed.append(struct.pack("!L", self.subtype))
        packed.append(struct.pack("!H", self.table_id))
        packed.append('\x00' * 2)
        packed.append(struct.pack("!L", self.buckets_size))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Deserialize from `reader`; returns a new instance."""
        obj = bsn_gentable_set_buckets_size()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 4)
        _length = reader.read("!H")[0]
        orig_reader = reader
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _experimenter = reader.read("!L")[0]
        assert(_experimenter == 6035143)
        _subtype = reader.read("!L")[0]
        assert(_subtype == 50)
        obj.table_id = reader.read("!H")[0]
        reader.skip(2)
        obj.buckets_size = reader.read("!L")[0]
        return obj
    def __eq__(self, other):
        return (type(self) == type(other) and
                self.xid == other.xid and
                self.table_id == other.table_id and
                self.buckets_size == other.buckets_size)
    def pretty_print(self, q):
        """Write a human-readable rendering to pretty-printer `q`."""
        q.text("bsn_gentable_set_buckets_size {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("table_id = ");
                q.text("%#x" % self.table_id)
                q.text(","); q.breakable()
                q.text("buckets_size = ");
                q.text("%#x" % self.buckets_size)
                q.breakable()
        q.text('}')
bsn_header.subtypes[50] = bsn_gentable_set_buckets_size
class bsn_gentable_stats_reply(bsn_stats_reply):
    """BSN gentable stats reply (stats subtype 7).

    Carries per-table gentable stats `entries`.
    """
    version = 4
    type = 19
    stats_type = 65535
    experimenter = 6035143
    subtype = 7
    def __init__(self, xid=None, flags=None, entries=None):
        # Use `is not None` (not `!= None`) so falsy-but-valid caller
        # values such as 0 or [] are preserved.
        self.xid = xid
        self.flags = flags if flags is not None else 0
        self.entries = entries if entries is not None else []
    def pack(self):
        """Serialize this message; returns the packed byte string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.stats_type))
        packed.append(struct.pack("!H", self.flags))
        packed.append('\x00' * 4)
        packed.append(struct.pack("!L", self.experimenter))
        packed.append(struct.pack("!L", self.subtype))
        packed.append(loxi.generic_util.pack_list(self.entries))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Deserialize from `reader`; returns a new instance."""
        obj = bsn_gentable_stats_reply()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 19)
        _length = reader.read("!H")[0]
        orig_reader = reader
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _stats_type = reader.read("!H")[0]
        assert(_stats_type == 65535)
        obj.flags = reader.read("!H")[0]
        reader.skip(4)
        _experimenter = reader.read("!L")[0]
        assert(_experimenter == 6035143)
        _subtype = reader.read("!L")[0]
        assert(_subtype == 7)
        obj.entries = loxi.generic_util.unpack_list(reader, common.bsn_gentable_stats_entry.unpack)
        return obj
    def __eq__(self, other):
        return (type(self) == type(other) and
                self.xid == other.xid and
                self.flags == other.flags and
                self.entries == other.entries)
    def pretty_print(self, q):
        """Write a human-readable rendering to pretty-printer `q`."""
        q.text("bsn_gentable_stats_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("flags = ");
                q.text("%#x" % self.flags)
                q.text(","); q.breakable()
                q.text("entries = ");
                q.pp(self.entries)
                q.breakable()
        q.text('}')
bsn_stats_reply.subtypes[7] = bsn_gentable_stats_reply
class bsn_gentable_stats_request(bsn_stats_request):
    """BSN gentable stats request (stats subtype 7); no body fields."""
    version = 4
    type = 18
    stats_type = 65535
    experimenter = 6035143
    subtype = 7
    def __init__(self, xid=None, flags=None):
        # Use `is not None` (not `!= None`) so a falsy-but-valid 0 from
        # the caller is preserved.
        self.xid = xid
        self.flags = flags if flags is not None else 0
    def pack(self):
        """Serialize this message; returns the packed byte string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.stats_type))
        packed.append(struct.pack("!H", self.flags))
        packed.append('\x00' * 4)
        packed.append(struct.pack("!L", self.experimenter))
        packed.append(struct.pack("!L", self.subtype))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Deserialize from `reader`; returns a new instance."""
        obj = bsn_gentable_stats_request()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 18)
        _length = reader.read("!H")[0]
        orig_reader = reader
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _stats_type = reader.read("!H")[0]
        assert(_stats_type == 65535)
        obj.flags = reader.read("!H")[0]
        reader.skip(4)
        _experimenter = reader.read("!L")[0]
        assert(_experimenter == 6035143)
        _subtype = reader.read("!L")[0]
        assert(_subtype == 7)
        return obj
    def __eq__(self, other):
        return (type(self) == type(other) and
                self.xid == other.xid and
                self.flags == other.flags)
    def pretty_print(self, q):
        """Write a human-readable rendering to pretty-printer `q`."""
        q.text("bsn_gentable_stats_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("flags = ");
                q.text("%#x" % self.flags)
                q.breakable()
        q.text('}')
bsn_stats_request.subtypes[7] = bsn_gentable_stats_request
class bsn_get_interfaces_reply(bsn_header):
    """BSN get-interfaces reply (experimenter subtype 10).

    Carries the switch's `interfaces` list.
    """
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 10
    def __init__(self, xid=None, interfaces=None):
        # Use `is not None` (not `!= None`) so a falsy-but-valid []
        # from the caller is preserved.
        self.xid = xid
        self.interfaces = interfaces if interfaces is not None else []
    def pack(self):
        """Serialize this message; returns the packed byte string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!L", self.experimenter))
        packed.append(struct.pack("!L", self.subtype))
        packed.append(loxi.generic_util.pack_list(self.interfaces))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Deserialize from `reader`; returns a new instance."""
        obj = bsn_get_interfaces_reply()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 4)
        _length = reader.read("!H")[0]
        orig_reader = reader
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _experimenter = reader.read("!L")[0]
        assert(_experimenter == 6035143)
        _subtype = reader.read("!L")[0]
        assert(_subtype == 10)
        obj.interfaces = loxi.generic_util.unpack_list(reader, common.bsn_interface.unpack)
        return obj
    def __eq__(self, other):
        return (type(self) == type(other) and
                self.xid == other.xid and
                self.interfaces == other.interfaces)
    def pretty_print(self, q):
        """Write a human-readable rendering to pretty-printer `q`."""
        q.text("bsn_get_interfaces_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("interfaces = ");
                q.pp(self.interfaces)
                q.breakable()
        q.text('}')
bsn_header.subtypes[10] = bsn_get_interfaces_reply
class bsn_get_interfaces_request(bsn_header):
    """BSN get-interfaces request (experimenter subtype 9); header only."""
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 9
    def __init__(self, xid=None):
        # xid may legitimately be None (auto-assigned later by the
        # connection layer).
        self.xid = xid
    def pack(self):
        """Serialize this message; returns the packed byte string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!L", self.experimenter))
        packed.append(struct.pack("!L", self.subtype))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Deserialize from `reader`; returns a new instance."""
        obj = bsn_get_interfaces_request()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 4)
        _length = reader.read("!H")[0]
        orig_reader = reader
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _experimenter = reader.read("!L")[0]
        assert(_experimenter == 6035143)
        _subtype = reader.read("!L")[0]
        assert(_subtype == 9)
        return obj
    def __eq__(self, other):
        return (type(self) == type(other) and
                self.xid == other.xid)
    def pretty_print(self, q):
        """Write a human-readable rendering to pretty-printer `q`."""
        q.text("bsn_get_interfaces_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.breakable()
        q.text('}')
bsn_header.subtypes[9] = bsn_get_interfaces_request
class bsn_get_mirroring_reply(bsn_header):
    """BSN get-mirroring reply (experimenter subtype 5).

    Carries the `report_mirror_ports` flag byte.
    """
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 5
    def __init__(self, xid=None, report_mirror_ports=None):
        # Use `is not None` (not `!= None`) so a falsy-but-valid 0 from
        # the caller is preserved.
        self.xid = xid
        self.report_mirror_ports = report_mirror_ports if report_mirror_ports is not None else 0
    def pack(self):
        """Serialize this message; returns the packed byte string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!L", self.experimenter))
        packed.append(struct.pack("!L", self.subtype))
        packed.append(struct.pack("!B", self.report_mirror_ports))
        packed.append('\x00' * 3)
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Deserialize from `reader`; returns a new instance."""
        obj = bsn_get_mirroring_reply()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 4)
        _length = reader.read("!H")[0]
        orig_reader = reader
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _experimenter = reader.read("!L")[0]
        assert(_experimenter == 6035143)
        _subtype = reader.read("!L")[0]
        assert(_subtype == 5)
        obj.report_mirror_ports = reader.read("!B")[0]
        reader.skip(3)
        return obj
    def __eq__(self, other):
        return (type(self) == type(other) and
                self.xid == other.xid and
                self.report_mirror_ports == other.report_mirror_ports)
    def pretty_print(self, q):
        """Write a human-readable rendering to pretty-printer `q`."""
        q.text("bsn_get_mirroring_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("report_mirror_ports = ");
                q.text("%#x" % self.report_mirror_ports)
                q.breakable()
        q.text('}')
bsn_header.subtypes[5] = bsn_get_mirroring_reply
class bsn_get_mirroring_request(bsn_header):
    """BSN get-mirroring request (experimenter subtype 4).

    Carries the `report_mirror_ports` flag byte.
    """
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 4
    def __init__(self, xid=None, report_mirror_ports=None):
        # Use `is not None` (not `!= None`) so a falsy-but-valid 0 from
        # the caller is preserved.
        self.xid = xid
        self.report_mirror_ports = report_mirror_ports if report_mirror_ports is not None else 0
    def pack(self):
        """Serialize this message; returns the packed byte string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!L", self.experimenter))
        packed.append(struct.pack("!L", self.subtype))
        packed.append(struct.pack("!B", self.report_mirror_ports))
        packed.append('\x00' * 3)
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Deserialize from `reader`; returns a new instance."""
        obj = bsn_get_mirroring_request()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 4)
        _length = reader.read("!H")[0]
        orig_reader = reader
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _experimenter = reader.read("!L")[0]
        assert(_experimenter == 6035143)
        _subtype = reader.read("!L")[0]
        assert(_subtype == 4)
        obj.report_mirror_ports = reader.read("!B")[0]
        reader.skip(3)
        return obj
    def __eq__(self, other):
        return (type(self) == type(other) and
                self.xid == other.xid and
                self.report_mirror_ports == other.report_mirror_ports)
    def pretty_print(self, q):
        """Write a human-readable rendering to pretty-printer `q`."""
        q.text("bsn_get_mirroring_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("report_mirror_ports = ");
                q.text("%#x" % self.report_mirror_ports)
                q.breakable()
        q.text('}')
bsn_header.subtypes[4] = bsn_get_mirroring_request
class bsn_get_switch_pipeline_reply(bsn_header):
    """BSN get-switch-pipeline reply (experimenter subtype 52).

    Carries the current `pipeline` name as a fixed 256-byte,
    NUL-padded field on the wire.
    """
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 52
    def __init__(self, xid=None, pipeline=None):
        # Use `is not None` (not `!= None`) so a falsy-but-valid ""
        # from the caller is preserved.
        self.xid = xid
        self.pipeline = pipeline if pipeline is not None else ""
    def pack(self):
        """Serialize this message; returns the packed byte string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!L", self.experimenter))
        packed.append(struct.pack("!L", self.subtype))
        packed.append(struct.pack("!256s", self.pipeline))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Deserialize from `reader`; returns a new instance."""
        obj = bsn_get_switch_pipeline_reply()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 4)
        _length = reader.read("!H")[0]
        orig_reader = reader
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _experimenter = reader.read("!L")[0]
        assert(_experimenter == 6035143)
        _subtype = reader.read("!L")[0]
        assert(_subtype == 52)
        # Strip the NUL padding appended by the fixed-width "!256s" field.
        obj.pipeline = reader.read("!256s")[0].rstrip("\x00")
        return obj
    def __eq__(self, other):
        return (type(self) == type(other) and
                self.xid == other.xid and
                self.pipeline == other.pipeline)
    def pretty_print(self, q):
        """Write a human-readable rendering to pretty-printer `q`."""
        q.text("bsn_get_switch_pipeline_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("pipeline = ");
                q.pp(self.pipeline)
                q.breakable()
        q.text('}')
bsn_header.subtypes[52] = bsn_get_switch_pipeline_reply
class bsn_get_switch_pipeline_request(bsn_header):
    """BSN get-switch-pipeline request (experimenter subtype 51); header only."""
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 51
    def __init__(self, xid=None):
        # xid may legitimately be None (auto-assigned later by the
        # connection layer).
        self.xid = xid
    def pack(self):
        """Serialize this message; returns the packed byte string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!L", self.experimenter))
        packed.append(struct.pack("!L", self.subtype))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Deserialize from `reader`; returns a new instance."""
        obj = bsn_get_switch_pipeline_request()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 4)
        _length = reader.read("!H")[0]
        orig_reader = reader
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _experimenter = reader.read("!L")[0]
        assert(_experimenter == 6035143)
        _subtype = reader.read("!L")[0]
        assert(_subtype == 51)
        return obj
    def __eq__(self, other):
        return (type(self) == type(other) and
                self.xid == other.xid)
    def pretty_print(self, q):
        """Write a human-readable rendering to pretty-printer `q`."""
        q.text("bsn_get_switch_pipeline_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.breakable()
        q.text('}')
bsn_header.subtypes[51] = bsn_get_switch_pipeline_request
class bsn_lacp_convergence_notif(bsn_header):
    """BSN experimenter message (subtype 43): LACP convergence notification."""
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 43

    def __init__(self, xid=None, convergence_status=None, port_no=None, actor_sys_priority=None, actor_sys_mac=None, actor_port_priority=None, actor_port_num=None, actor_key=None, partner_sys_priority=None, partner_sys_mac=None, partner_port_priority=None, partner_port_num=None, partner_key=None):
        # Unset numeric fields default to 0; MAC fields to an all-zero address.
        self.xid = xid
        self.convergence_status = convergence_status if convergence_status is not None else 0
        self.port_no = port_no if port_no is not None else 0
        self.actor_sys_priority = actor_sys_priority if actor_sys_priority is not None else 0
        self.actor_sys_mac = actor_sys_mac if actor_sys_mac is not None else [0,0,0,0,0,0]
        self.actor_port_priority = actor_port_priority if actor_port_priority is not None else 0
        self.actor_port_num = actor_port_num if actor_port_num is not None else 0
        self.actor_key = actor_key if actor_key is not None else 0
        self.partner_sys_priority = partner_sys_priority if partner_sys_priority is not None else 0
        self.partner_sys_mac = partner_sys_mac if partner_sys_mac is not None else [0,0,0,0,0,0]
        self.partner_port_priority = partner_port_priority if partner_port_priority is not None else 0
        self.partner_port_num = partner_port_num if partner_port_num is not None else 0
        self.partner_key = partner_key if partner_key is not None else 0

    def pack(self):
        """Serialize to the on-wire byte string; total length is patched into slot 2."""
        out = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder, patched below
            struct.pack("!L", self.xid),
            struct.pack("!L", self.experimenter),
            struct.pack("!L", self.subtype),
            struct.pack("!B", self.convergence_status),
            '\x00' * 3,
            util.pack_port_no(self.port_no),
            struct.pack("!H", self.actor_sys_priority),
            struct.pack("!6B", *self.actor_sys_mac),
            struct.pack("!H", self.actor_port_priority),
            struct.pack("!H", self.actor_port_num),
            struct.pack("!H", self.actor_key),
            struct.pack("!H", self.partner_sys_priority),
            struct.pack("!6B", *self.partner_sys_mac),
            struct.pack("!H", self.partner_port_priority),
            struct.pack("!H", self.partner_port_num),
            struct.pack("!H", self.partner_key),
        ]
        out[2] = struct.pack("!H", sum(len(piece) for piece in out))
        return ''.join(out)

    @staticmethod
    def unpack(reader):
        """Deserialize from *reader*; asserts the fixed header fields match."""
        msg = bsn_lacp_convergence_notif()
        assert(reader.read("!B")[0] == 4)   # version
        assert(reader.read("!B")[0] == 4)   # type
        _length = reader.read("!H")[0]
        # Restrict further reads to this message's payload only.
        reader = reader.slice(_length - (2 + 2))
        msg.xid = reader.read("!L")[0]
        assert(reader.read("!L")[0] == 6035143)  # experimenter
        assert(reader.read("!L")[0] == 43)       # subtype
        msg.convergence_status = reader.read("!B")[0]
        reader.skip(3)
        msg.port_no = util.unpack_port_no(reader)
        msg.actor_sys_priority = reader.read("!H")[0]
        msg.actor_sys_mac = list(reader.read('!6B'))
        msg.actor_port_priority = reader.read("!H")[0]
        msg.actor_port_num = reader.read("!H")[0]
        msg.actor_key = reader.read("!H")[0]
        msg.partner_sys_priority = reader.read("!H")[0]
        msg.partner_sys_mac = list(reader.read('!6B'))
        msg.partner_port_priority = reader.read("!H")[0]
        msg.partner_port_num = reader.read("!H")[0]
        msg.partner_key = reader.read("!H")[0]
        return msg

    def __eq__(self, other):
        if type(self) != type(other): return False
        fields = ("xid", "convergence_status", "port_no",
                  "actor_sys_priority", "actor_sys_mac", "actor_port_priority",
                  "actor_port_num", "actor_key",
                  "partner_sys_priority", "partner_sys_mac", "partner_port_priority",
                  "partner_port_num", "partner_key")
        return all(getattr(self, f) == getattr(other, f) for f in fields)

    def pretty_print(self, q):
        q.text("bsn_lacp_convergence_notif {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid is not None else 'None')
                q.text(","); q.breakable()
                q.text("convergence_status = ")
                q.text("%#x" % self.convergence_status)
                q.text(","); q.breakable()
                q.text("port_no = ")
                q.text(util.pretty_port(self.port_no))
                q.text(","); q.breakable()
                q.text("actor_sys_priority = ")
                q.text("%#x" % self.actor_sys_priority)
                q.text(","); q.breakable()
                q.text("actor_sys_mac = ")
                q.text(util.pretty_mac(self.actor_sys_mac))
                q.text(","); q.breakable()
                q.text("actor_port_priority = ")
                q.text("%#x" % self.actor_port_priority)
                q.text(","); q.breakable()
                q.text("actor_port_num = ")
                q.text("%#x" % self.actor_port_num)
                q.text(","); q.breakable()
                q.text("actor_key = ")
                q.text("%#x" % self.actor_key)
                q.text(","); q.breakable()
                q.text("partner_sys_priority = ")
                q.text("%#x" % self.partner_sys_priority)
                q.text(","); q.breakable()
                q.text("partner_sys_mac = ")
                q.text(util.pretty_mac(self.partner_sys_mac))
                q.text(","); q.breakable()
                q.text("partner_port_priority = ")
                q.text("%#x" % self.partner_port_priority)
                q.text(","); q.breakable()
                q.text("partner_port_num = ")
                q.text("%#x" % self.partner_port_num)
                q.text(","); q.breakable()
                q.text("partner_key = ")
                q.text("%#x" % self.partner_key)
            q.breakable()
        q.text('}')

bsn_header.subtypes[43] = bsn_lacp_convergence_notif
class bsn_lacp_stats_reply(bsn_stats_reply):
    """BSN experimenter stats reply (subtype 1): LACP statistics."""
    version = 4
    type = 19
    stats_type = 65535
    experimenter = 6035143
    subtype = 1

    def __init__(self, xid=None, flags=None, entries=None):
        self.xid = xid
        self.flags = flags if flags is not None else 0
        self.entries = entries if entries is not None else []

    def pack(self):
        """Serialize to the on-wire byte string; total length is patched into slot 2."""
        out = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder, patched below
            struct.pack("!L", self.xid),
            struct.pack("!H", self.stats_type),
            struct.pack("!H", self.flags),
            '\x00' * 4,
            struct.pack("!L", self.experimenter),
            struct.pack("!L", self.subtype),
            loxi.generic_util.pack_list(self.entries),
        ]
        out[2] = struct.pack("!H", sum(len(piece) for piece in out))
        return ''.join(out)

    @staticmethod
    def unpack(reader):
        """Deserialize from *reader*; asserts the fixed header fields match."""
        msg = bsn_lacp_stats_reply()
        assert(reader.read("!B")[0] == 4)    # version
        assert(reader.read("!B")[0] == 19)   # type
        _length = reader.read("!H")[0]
        reader = reader.slice(_length - (2 + 2))
        msg.xid = reader.read("!L")[0]
        assert(reader.read("!H")[0] == 65535)    # stats_type
        msg.flags = reader.read("!H")[0]
        reader.skip(4)
        assert(reader.read("!L")[0] == 6035143)  # experimenter
        assert(reader.read("!L")[0] == 1)        # subtype
        msg.entries = loxi.generic_util.unpack_list(reader, common.bsn_lacp_stats_entry.unpack)
        return msg

    def __eq__(self, other):
        if type(self) != type(other): return False
        return (self.xid == other.xid and
                self.flags == other.flags and
                self.entries == other.entries)

    def pretty_print(self, q):
        q.text("bsn_lacp_stats_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid is not None else 'None')
                q.text(","); q.breakable()
                q.text("flags = ")
                q.text("%#x" % self.flags)
                q.text(","); q.breakable()
                q.text("entries = ")
                q.pp(self.entries)
            q.breakable()
        q.text('}')

bsn_stats_reply.subtypes[1] = bsn_lacp_stats_reply
class bsn_lacp_stats_request(bsn_stats_request):
    """BSN experimenter stats request (subtype 1): ask for LACP statistics."""
    version = 4
    type = 18
    stats_type = 65535
    experimenter = 6035143
    subtype = 1

    def __init__(self, xid=None, flags=None):
        self.xid = xid
        self.flags = flags if flags is not None else 0

    def pack(self):
        """Serialize to the on-wire byte string; total length is patched into slot 2."""
        out = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder, patched below
            struct.pack("!L", self.xid),
            struct.pack("!H", self.stats_type),
            struct.pack("!H", self.flags),
            '\x00' * 4,
            struct.pack("!L", self.experimenter),
            struct.pack("!L", self.subtype),
        ]
        out[2] = struct.pack("!H", sum(len(piece) for piece in out))
        return ''.join(out)

    @staticmethod
    def unpack(reader):
        """Deserialize from *reader*; asserts the fixed header fields match."""
        msg = bsn_lacp_stats_request()
        assert(reader.read("!B")[0] == 4)    # version
        assert(reader.read("!B")[0] == 18)   # type
        _length = reader.read("!H")[0]
        reader = reader.slice(_length - (2 + 2))
        msg.xid = reader.read("!L")[0]
        assert(reader.read("!H")[0] == 65535)    # stats_type
        msg.flags = reader.read("!H")[0]
        reader.skip(4)
        assert(reader.read("!L")[0] == 6035143)  # experimenter
        assert(reader.read("!L")[0] == 1)        # subtype
        return msg

    def __eq__(self, other):
        if type(self) != type(other): return False
        return self.xid == other.xid and self.flags == other.flags

    def pretty_print(self, q):
        q.text("bsn_lacp_stats_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid is not None else 'None')
                q.text(","); q.breakable()
                q.text("flags = ")
                q.text("%#x" % self.flags)
            q.breakable()
        q.text('}')

bsn_stats_request.subtypes[1] = bsn_lacp_stats_request
class bsn_pdu_rx_reply(bsn_header):
    """BSN experimenter message (subtype 34): reply to a PDU RX request."""
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 34

    def __init__(self, xid=None, status=None, port_no=None, slot_num=None):
        self.xid = xid
        self.status = status if status is not None else 0
        self.port_no = port_no if port_no is not None else 0
        self.slot_num = slot_num if slot_num is not None else 0

    def pack(self):
        """Serialize to the on-wire byte string; total length is patched into slot 2."""
        out = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder, patched below
            struct.pack("!L", self.xid),
            struct.pack("!L", self.experimenter),
            struct.pack("!L", self.subtype),
            struct.pack("!L", self.status),
            util.pack_port_no(self.port_no),
            struct.pack("!B", self.slot_num),
        ]
        out[2] = struct.pack("!H", sum(len(piece) for piece in out))
        return ''.join(out)

    @staticmethod
    def unpack(reader):
        """Deserialize from *reader*; asserts the fixed header fields match."""
        msg = bsn_pdu_rx_reply()
        assert(reader.read("!B")[0] == 4)   # version
        assert(reader.read("!B")[0] == 4)   # type
        _length = reader.read("!H")[0]
        reader = reader.slice(_length - (2 + 2))
        msg.xid = reader.read("!L")[0]
        assert(reader.read("!L")[0] == 6035143)  # experimenter
        assert(reader.read("!L")[0] == 34)       # subtype
        msg.status = reader.read("!L")[0]
        msg.port_no = util.unpack_port_no(reader)
        msg.slot_num = reader.read("!B")[0]
        return msg

    def __eq__(self, other):
        if type(self) != type(other): return False
        return (self.xid == other.xid and
                self.status == other.status and
                self.port_no == other.port_no and
                self.slot_num == other.slot_num)

    def pretty_print(self, q):
        q.text("bsn_pdu_rx_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid is not None else 'None')
                q.text(","); q.breakable()
                q.text("status = ")
                q.text("%#x" % self.status)
                q.text(","); q.breakable()
                q.text("port_no = ")
                q.text(util.pretty_port(self.port_no))
                q.text(","); q.breakable()
                q.text("slot_num = ")
                q.text("%#x" % self.slot_num)
            q.breakable()
        q.text('}')

bsn_header.subtypes[34] = bsn_pdu_rx_reply
class bsn_pdu_rx_request(bsn_header):
    """BSN experimenter message (subtype 33): expect a PDU on a port within a timeout."""
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 33

    def __init__(self, xid=None, timeout_ms=None, port_no=None, slot_num=None, data=None):
        self.xid = xid
        self.timeout_ms = timeout_ms if timeout_ms is not None else 0
        self.port_no = port_no if port_no is not None else 0
        self.slot_num = slot_num if slot_num is not None else 0
        self.data = data if data is not None else ''

    def pack(self):
        """Serialize to the on-wire byte string; total length is patched into slot 2."""
        out = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder, patched below
            struct.pack("!L", self.xid),
            struct.pack("!L", self.experimenter),
            struct.pack("!L", self.subtype),
            struct.pack("!L", self.timeout_ms),
            util.pack_port_no(self.port_no),
            struct.pack("!B", self.slot_num),
            '\x00' * 3,
            self.data,
        ]
        out[2] = struct.pack("!H", sum(len(piece) for piece in out))
        return ''.join(out)

    @staticmethod
    def unpack(reader):
        """Deserialize from *reader*; trailing bytes become the PDU payload."""
        msg = bsn_pdu_rx_request()
        assert(reader.read("!B")[0] == 4)   # version
        assert(reader.read("!B")[0] == 4)   # type
        _length = reader.read("!H")[0]
        reader = reader.slice(_length - (2 + 2))
        msg.xid = reader.read("!L")[0]
        assert(reader.read("!L")[0] == 6035143)  # experimenter
        assert(reader.read("!L")[0] == 33)       # subtype
        msg.timeout_ms = reader.read("!L")[0]
        msg.port_no = util.unpack_port_no(reader)
        msg.slot_num = reader.read("!B")[0]
        reader.skip(3)
        msg.data = str(reader.read_all())
        return msg

    def __eq__(self, other):
        if type(self) != type(other): return False
        return (self.xid == other.xid and
                self.timeout_ms == other.timeout_ms and
                self.port_no == other.port_no and
                self.slot_num == other.slot_num and
                self.data == other.data)

    def pretty_print(self, q):
        q.text("bsn_pdu_rx_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid is not None else 'None')
                q.text(","); q.breakable()
                q.text("timeout_ms = ")
                q.text("%#x" % self.timeout_ms)
                q.text(","); q.breakable()
                q.text("port_no = ")
                q.text(util.pretty_port(self.port_no))
                q.text(","); q.breakable()
                q.text("slot_num = ")
                q.text("%#x" % self.slot_num)
                q.text(","); q.breakable()
                q.text("data = ")
                q.pp(self.data)
            q.breakable()
        q.text('}')

bsn_header.subtypes[33] = bsn_pdu_rx_request
class bsn_pdu_rx_timeout(bsn_header):
    """BSN experimenter message (subtype 35): a PDU RX request timed out."""
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 35

    def __init__(self, xid=None, port_no=None, slot_num=None):
        self.xid = xid
        self.port_no = port_no if port_no is not None else 0
        self.slot_num = slot_num if slot_num is not None else 0

    def pack(self):
        """Serialize to the on-wire byte string; total length is patched into slot 2."""
        out = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder, patched below
            struct.pack("!L", self.xid),
            struct.pack("!L", self.experimenter),
            struct.pack("!L", self.subtype),
            util.pack_port_no(self.port_no),
            struct.pack("!B", self.slot_num),
        ]
        out[2] = struct.pack("!H", sum(len(piece) for piece in out))
        return ''.join(out)

    @staticmethod
    def unpack(reader):
        """Deserialize from *reader*; asserts the fixed header fields match."""
        msg = bsn_pdu_rx_timeout()
        assert(reader.read("!B")[0] == 4)   # version
        assert(reader.read("!B")[0] == 4)   # type
        _length = reader.read("!H")[0]
        reader = reader.slice(_length - (2 + 2))
        msg.xid = reader.read("!L")[0]
        assert(reader.read("!L")[0] == 6035143)  # experimenter
        assert(reader.read("!L")[0] == 35)       # subtype
        msg.port_no = util.unpack_port_no(reader)
        msg.slot_num = reader.read("!B")[0]
        return msg

    def __eq__(self, other):
        if type(self) != type(other): return False
        return (self.xid == other.xid and
                self.port_no == other.port_no and
                self.slot_num == other.slot_num)

    def pretty_print(self, q):
        q.text("bsn_pdu_rx_timeout {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid is not None else 'None')
                q.text(","); q.breakable()
                q.text("port_no = ")
                q.text(util.pretty_port(self.port_no))
                q.text(","); q.breakable()
                q.text("slot_num = ")
                q.text("%#x" % self.slot_num)
            q.breakable()
        q.text('}')

bsn_header.subtypes[35] = bsn_pdu_rx_timeout
class bsn_pdu_tx_reply(bsn_header):
    """BSN experimenter message (subtype 32): reply to a PDU TX request."""
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 32

    def __init__(self, xid=None, status=None, port_no=None, slot_num=None):
        self.xid = xid
        self.status = status if status is not None else 0
        self.port_no = port_no if port_no is not None else 0
        self.slot_num = slot_num if slot_num is not None else 0

    def pack(self):
        """Serialize to the on-wire byte string; total length is patched into slot 2."""
        out = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder, patched below
            struct.pack("!L", self.xid),
            struct.pack("!L", self.experimenter),
            struct.pack("!L", self.subtype),
            struct.pack("!L", self.status),
            util.pack_port_no(self.port_no),
            struct.pack("!B", self.slot_num),
        ]
        out[2] = struct.pack("!H", sum(len(piece) for piece in out))
        return ''.join(out)

    @staticmethod
    def unpack(reader):
        """Deserialize from *reader*; asserts the fixed header fields match."""
        msg = bsn_pdu_tx_reply()
        assert(reader.read("!B")[0] == 4)   # version
        assert(reader.read("!B")[0] == 4)   # type
        _length = reader.read("!H")[0]
        reader = reader.slice(_length - (2 + 2))
        msg.xid = reader.read("!L")[0]
        assert(reader.read("!L")[0] == 6035143)  # experimenter
        assert(reader.read("!L")[0] == 32)       # subtype
        msg.status = reader.read("!L")[0]
        msg.port_no = util.unpack_port_no(reader)
        msg.slot_num = reader.read("!B")[0]
        return msg

    def __eq__(self, other):
        if type(self) != type(other): return False
        return (self.xid == other.xid and
                self.status == other.status and
                self.port_no == other.port_no and
                self.slot_num == other.slot_num)

    def pretty_print(self, q):
        q.text("bsn_pdu_tx_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid is not None else 'None')
                q.text(","); q.breakable()
                q.text("status = ")
                q.text("%#x" % self.status)
                q.text(","); q.breakable()
                q.text("port_no = ")
                q.text(util.pretty_port(self.port_no))
                q.text(","); q.breakable()
                q.text("slot_num = ")
                q.text("%#x" % self.slot_num)
            q.breakable()
        q.text('}')

bsn_header.subtypes[32] = bsn_pdu_tx_reply
class bsn_pdu_tx_request(bsn_header):
    """BSN experimenter message (subtype 31): periodically transmit a PDU on a port."""
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 31

    def __init__(self, xid=None, tx_interval_ms=None, port_no=None, slot_num=None, data=None):
        self.xid = xid
        self.tx_interval_ms = tx_interval_ms if tx_interval_ms is not None else 0
        self.port_no = port_no if port_no is not None else 0
        self.slot_num = slot_num if slot_num is not None else 0
        self.data = data if data is not None else ''

    def pack(self):
        """Serialize to the on-wire byte string; total length is patched into slot 2."""
        out = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder, patched below
            struct.pack("!L", self.xid),
            struct.pack("!L", self.experimenter),
            struct.pack("!L", self.subtype),
            struct.pack("!L", self.tx_interval_ms),
            util.pack_port_no(self.port_no),
            struct.pack("!B", self.slot_num),
            '\x00' * 3,
            self.data,
        ]
        out[2] = struct.pack("!H", sum(len(piece) for piece in out))
        return ''.join(out)

    @staticmethod
    def unpack(reader):
        """Deserialize from *reader*; trailing bytes become the PDU payload."""
        msg = bsn_pdu_tx_request()
        assert(reader.read("!B")[0] == 4)   # version
        assert(reader.read("!B")[0] == 4)   # type
        _length = reader.read("!H")[0]
        reader = reader.slice(_length - (2 + 2))
        msg.xid = reader.read("!L")[0]
        assert(reader.read("!L")[0] == 6035143)  # experimenter
        assert(reader.read("!L")[0] == 31)       # subtype
        msg.tx_interval_ms = reader.read("!L")[0]
        msg.port_no = util.unpack_port_no(reader)
        msg.slot_num = reader.read("!B")[0]
        reader.skip(3)
        msg.data = str(reader.read_all())
        return msg

    def __eq__(self, other):
        if type(self) != type(other): return False
        return (self.xid == other.xid and
                self.tx_interval_ms == other.tx_interval_ms and
                self.port_no == other.port_no and
                self.slot_num == other.slot_num and
                self.data == other.data)

    def pretty_print(self, q):
        q.text("bsn_pdu_tx_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid is not None else 'None')
                q.text(","); q.breakable()
                q.text("tx_interval_ms = ")
                q.text("%#x" % self.tx_interval_ms)
                q.text(","); q.breakable()
                q.text("port_no = ")
                q.text(util.pretty_port(self.port_no))
                q.text(","); q.breakable()
                q.text("slot_num = ")
                q.text("%#x" % self.slot_num)
                q.text(","); q.breakable()
                q.text("data = ")
                q.pp(self.data)
            q.breakable()
        q.text('}')

bsn_header.subtypes[31] = bsn_pdu_tx_request
class bsn_port_counter_stats_reply(bsn_stats_reply):
    """BSN experimenter stats reply (subtype 8): per-port counter statistics."""
    version = 4
    type = 19
    stats_type = 65535
    experimenter = 6035143
    subtype = 8

    def __init__(self, xid=None, flags=None, entries=None):
        self.xid = xid
        self.flags = flags if flags is not None else 0
        self.entries = entries if entries is not None else []

    def pack(self):
        """Serialize to the on-wire byte string; total length is patched into slot 2."""
        out = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder, patched below
            struct.pack("!L", self.xid),
            struct.pack("!H", self.stats_type),
            struct.pack("!H", self.flags),
            '\x00' * 4,
            struct.pack("!L", self.experimenter),
            struct.pack("!L", self.subtype),
            loxi.generic_util.pack_list(self.entries),
        ]
        out[2] = struct.pack("!H", sum(len(piece) for piece in out))
        return ''.join(out)

    @staticmethod
    def unpack(reader):
        """Deserialize from *reader*; asserts the fixed header fields match."""
        msg = bsn_port_counter_stats_reply()
        assert(reader.read("!B")[0] == 4)    # version
        assert(reader.read("!B")[0] == 19)   # type
        _length = reader.read("!H")[0]
        reader = reader.slice(_length - (2 + 2))
        msg.xid = reader.read("!L")[0]
        assert(reader.read("!H")[0] == 65535)    # stats_type
        msg.flags = reader.read("!H")[0]
        reader.skip(4)
        assert(reader.read("!L")[0] == 6035143)  # experimenter
        assert(reader.read("!L")[0] == 8)        # subtype
        msg.entries = loxi.generic_util.unpack_list(reader, common.bsn_port_counter_stats_entry.unpack)
        return msg

    def __eq__(self, other):
        if type(self) != type(other): return False
        return (self.xid == other.xid and
                self.flags == other.flags and
                self.entries == other.entries)

    def pretty_print(self, q):
        q.text("bsn_port_counter_stats_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid is not None else 'None')
                q.text(","); q.breakable()
                q.text("flags = ")
                q.text("%#x" % self.flags)
                q.text(","); q.breakable()
                q.text("entries = ")
                q.pp(self.entries)
            q.breakable()
        q.text('}')

bsn_stats_reply.subtypes[8] = bsn_port_counter_stats_reply
class bsn_port_counter_stats_request(bsn_stats_request):
    """BSN experimenter stats request (subtype 8): counters for a single port."""
    version = 4
    type = 18
    stats_type = 65535
    experimenter = 6035143
    subtype = 8

    def __init__(self, xid=None, flags=None, port_no=None):
        self.xid = xid
        self.flags = flags if flags is not None else 0
        self.port_no = port_no if port_no is not None else 0

    def pack(self):
        """Serialize to the on-wire byte string; total length is patched into slot 2."""
        out = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder, patched below
            struct.pack("!L", self.xid),
            struct.pack("!H", self.stats_type),
            struct.pack("!H", self.flags),
            '\x00' * 4,
            struct.pack("!L", self.experimenter),
            struct.pack("!L", self.subtype),
            util.pack_port_no(self.port_no),
        ]
        out[2] = struct.pack("!H", sum(len(piece) for piece in out))
        return ''.join(out)

    @staticmethod
    def unpack(reader):
        """Deserialize from *reader*; asserts the fixed header fields match."""
        msg = bsn_port_counter_stats_request()
        assert(reader.read("!B")[0] == 4)    # version
        assert(reader.read("!B")[0] == 18)   # type
        _length = reader.read("!H")[0]
        reader = reader.slice(_length - (2 + 2))
        msg.xid = reader.read("!L")[0]
        assert(reader.read("!H")[0] == 65535)    # stats_type
        msg.flags = reader.read("!H")[0]
        reader.skip(4)
        assert(reader.read("!L")[0] == 6035143)  # experimenter
        assert(reader.read("!L")[0] == 8)        # subtype
        msg.port_no = util.unpack_port_no(reader)
        return msg

    def __eq__(self, other):
        if type(self) != type(other): return False
        return (self.xid == other.xid and
                self.flags == other.flags and
                self.port_no == other.port_no)

    def pretty_print(self, q):
        q.text("bsn_port_counter_stats_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid is not None else 'None')
                q.text(","); q.breakable()
                q.text("flags = ")
                q.text("%#x" % self.flags)
                q.text(","); q.breakable()
                q.text("port_no = ")
                q.text(util.pretty_port(self.port_no))
            q.breakable()
        q.text('}')

bsn_stats_request.subtypes[8] = bsn_port_counter_stats_request
class bsn_role_status(bsn_header):
    """BSN experimenter message (subtype 55): controller role change notification."""
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 55

    def __init__(self, xid=None, role=None, reason=None, generation_id=None):
        self.xid = xid
        self.role = role if role is not None else 0
        self.reason = reason if reason is not None else 0
        self.generation_id = generation_id if generation_id is not None else 0

    def pack(self):
        """Serialize to the on-wire byte string; total length is patched into slot 2."""
        out = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder, patched below
            struct.pack("!L", self.xid),
            struct.pack("!L", self.experimenter),
            struct.pack("!L", self.subtype),
            struct.pack("!L", self.role),
            struct.pack("!B", self.reason),
            '\x00' * 3,
            struct.pack("!Q", self.generation_id),
        ]
        out[2] = struct.pack("!H", sum(len(piece) for piece in out))
        return ''.join(out)

    @staticmethod
    def unpack(reader):
        """Deserialize from *reader*; asserts the fixed header fields match."""
        msg = bsn_role_status()
        assert(reader.read("!B")[0] == 4)   # version
        assert(reader.read("!B")[0] == 4)   # type
        _length = reader.read("!H")[0]
        reader = reader.slice(_length - (2 + 2))
        msg.xid = reader.read("!L")[0]
        assert(reader.read("!L")[0] == 6035143)  # experimenter
        assert(reader.read("!L")[0] == 55)       # subtype
        msg.role = reader.read("!L")[0]
        msg.reason = reader.read("!B")[0]
        reader.skip(3)
        msg.generation_id = reader.read("!Q")[0]
        return msg

    def __eq__(self, other):
        if type(self) != type(other): return False
        return (self.xid == other.xid and
                self.role == other.role and
                self.reason == other.reason and
                self.generation_id == other.generation_id)

    def pretty_print(self, q):
        q.text("bsn_role_status {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid is not None else 'None')
                q.text(","); q.breakable()
                q.text("role = ")
                q.text("%#x" % self.role)
                q.text(","); q.breakable()
                q.text("reason = ")
                q.text("%#x" % self.reason)
                q.text(","); q.breakable()
                q.text("generation_id = ")
                q.text("%#x" % self.generation_id)
            q.breakable()
        q.text('}')

bsn_header.subtypes[55] = bsn_role_status
class bsn_set_aux_cxns_reply(bsn_header):
    """BSN experimenter message (subtype 59): reply to a set-aux-connections request."""
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 59

    def __init__(self, xid=None, num_aux=None, status=None):
        self.xid = xid
        self.num_aux = num_aux if num_aux is not None else 0
        self.status = status if status is not None else 0

    def pack(self):
        """Serialize to the on-wire byte string; total length is patched into slot 2."""
        out = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder, patched below
            struct.pack("!L", self.xid),
            struct.pack("!L", self.experimenter),
            struct.pack("!L", self.subtype),
            struct.pack("!L", self.num_aux),
            struct.pack("!L", self.status),
        ]
        out[2] = struct.pack("!H", sum(len(piece) for piece in out))
        return ''.join(out)

    @staticmethod
    def unpack(reader):
        """Deserialize from *reader*; asserts the fixed header fields match."""
        msg = bsn_set_aux_cxns_reply()
        assert(reader.read("!B")[0] == 4)   # version
        assert(reader.read("!B")[0] == 4)   # type
        _length = reader.read("!H")[0]
        reader = reader.slice(_length - (2 + 2))
        msg.xid = reader.read("!L")[0]
        assert(reader.read("!L")[0] == 6035143)  # experimenter
        assert(reader.read("!L")[0] == 59)       # subtype
        msg.num_aux = reader.read("!L")[0]
        msg.status = reader.read("!L")[0]
        return msg

    def __eq__(self, other):
        if type(self) != type(other): return False
        return (self.xid == other.xid and
                self.num_aux == other.num_aux and
                self.status == other.status)

    def pretty_print(self, q):
        q.text("bsn_set_aux_cxns_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid is not None else 'None')
                q.text(","); q.breakable()
                q.text("num_aux = ")
                q.text("%#x" % self.num_aux)
                q.text(","); q.breakable()
                q.text("status = ")
                q.text("%#x" % self.status)
            q.breakable()
        q.text('}')

bsn_header.subtypes[59] = bsn_set_aux_cxns_reply
class bsn_set_aux_cxns_request(bsn_header):
    """BSN experimenter message (subtype 58): request a number of auxiliary connections."""
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 58

    def __init__(self, xid=None, num_aux=None):
        self.xid = xid
        self.num_aux = num_aux if num_aux is not None else 0

    def pack(self):
        """Serialize to the on-wire byte string; total length is patched into slot 2."""
        out = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder, patched below
            struct.pack("!L", self.xid),
            struct.pack("!L", self.experimenter),
            struct.pack("!L", self.subtype),
            struct.pack("!L", self.num_aux),
        ]
        out[2] = struct.pack("!H", sum(len(piece) for piece in out))
        return ''.join(out)

    @staticmethod
    def unpack(reader):
        """Deserialize from *reader*; asserts the fixed header fields match."""
        msg = bsn_set_aux_cxns_request()
        assert(reader.read("!B")[0] == 4)   # version
        assert(reader.read("!B")[0] == 4)   # type
        _length = reader.read("!H")[0]
        reader = reader.slice(_length - (2 + 2))
        msg.xid = reader.read("!L")[0]
        assert(reader.read("!L")[0] == 6035143)  # experimenter
        assert(reader.read("!L")[0] == 58)       # subtype
        msg.num_aux = reader.read("!L")[0]
        return msg

    def __eq__(self, other):
        if type(self) != type(other): return False
        return self.xid == other.xid and self.num_aux == other.num_aux

    def pretty_print(self, q):
        q.text("bsn_set_aux_cxns_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid is not None else 'None')
                q.text(","); q.breakable()
                q.text("num_aux = ")
                q.text("%#x" % self.num_aux)
            q.breakable()
        q.text('}')

bsn_header.subtypes[58] = bsn_set_aux_cxns_request
class bsn_set_lacp_reply(bsn_header):
    """BSN experimenter message (subtype 42): reply to a set-LACP request."""
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 42

    def __init__(self, xid=None, status=None, port_no=None):
        self.xid = xid
        self.status = status if status is not None else 0
        self.port_no = port_no if port_no is not None else 0

    def pack(self):
        """Serialize to the on-wire byte string; total length is patched into slot 2."""
        out = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder, patched below
            struct.pack("!L", self.xid),
            struct.pack("!L", self.experimenter),
            struct.pack("!L", self.subtype),
            struct.pack("!L", self.status),
            util.pack_port_no(self.port_no),
        ]
        out[2] = struct.pack("!H", sum(len(piece) for piece in out))
        return ''.join(out)

    @staticmethod
    def unpack(reader):
        """Deserialize from *reader*; asserts the fixed header fields match."""
        msg = bsn_set_lacp_reply()
        assert(reader.read("!B")[0] == 4)   # version
        assert(reader.read("!B")[0] == 4)   # type
        _length = reader.read("!H")[0]
        reader = reader.slice(_length - (2 + 2))
        msg.xid = reader.read("!L")[0]
        assert(reader.read("!L")[0] == 6035143)  # experimenter
        assert(reader.read("!L")[0] == 42)       # subtype
        msg.status = reader.read("!L")[0]
        msg.port_no = util.unpack_port_no(reader)
        return msg

    def __eq__(self, other):
        if type(self) != type(other): return False
        return (self.xid == other.xid and
                self.status == other.status and
                self.port_no == other.port_no)

    def pretty_print(self, q):
        q.text("bsn_set_lacp_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid is not None else 'None')
                q.text(","); q.breakable()
                q.text("status = ")
                q.text("%#x" % self.status)
                q.text(","); q.breakable()
                q.text("port_no = ")
                q.text(util.pretty_port(self.port_no))
            q.breakable()
        q.text('}')

bsn_header.subtypes[42] = bsn_set_lacp_reply
class bsn_set_lacp_request(bsn_header):
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 41

    def __init__(self, xid=None, enabled=None, port_no=None, actor_sys_priority=None, actor_sys_mac=None, actor_port_priority=None, actor_port_num=None, actor_key=None):
        """BSN set-LACP request (experimenter subtype 41)."""
        self.xid = xid
        self.enabled = enabled if enabled is not None else 0
        self.port_no = port_no if port_no is not None else 0
        self.actor_sys_priority = actor_sys_priority if actor_sys_priority is not None else 0
        # fresh list per instance — avoids sharing a mutable default
        self.actor_sys_mac = actor_sys_mac if actor_sys_mac is not None else [0, 0, 0, 0, 0, 0]
        self.actor_port_priority = actor_port_priority if actor_port_priority is not None else 0
        self.actor_port_num = actor_port_num if actor_port_num is not None else 0
        self.actor_key = actor_key if actor_key is not None else 0

    def pack(self):
        """Serialize to wire format; length at index 2 is patched last."""
        parts = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder
            struct.pack("!L", self.xid),
            struct.pack("!L", self.experimenter),
            struct.pack("!L", self.subtype),
            struct.pack("!B", self.enabled),
            '\x00' * 3,  # pad
            util.pack_port_no(self.port_no),
            struct.pack("!H", self.actor_sys_priority),
            struct.pack("!6B", *self.actor_sys_mac),
            struct.pack("!H", self.actor_port_priority),
            struct.pack("!H", self.actor_port_num),
            struct.pack("!H", self.actor_key),
        ]
        parts[2] = struct.pack("!H", sum(len(p) for p in parts))
        return ''.join(parts)

    @staticmethod
    def unpack(reader):
        """Deserialize; asserts the header identifies this message type."""
        obj = bsn_set_lacp_request()
        assert reader.read("!B")[0] == 4  # version
        assert reader.read("!B")[0] == 4  # type
        _length = reader.read("!H")[0]
        reader = reader.slice(_length - (2 + 2))  # bound reads to this message
        obj.xid = reader.read("!L")[0]
        assert reader.read("!L")[0] == 6035143  # experimenter
        assert reader.read("!L")[0] == 41  # subtype
        obj.enabled = reader.read("!B")[0]
        reader.skip(3)  # pad
        obj.port_no = util.unpack_port_no(reader)
        obj.actor_sys_priority = reader.read("!H")[0]
        obj.actor_sys_mac = list(reader.read('!6B'))
        obj.actor_port_priority = reader.read("!H")[0]
        obj.actor_port_num = reader.read("!H")[0]
        obj.actor_key = reader.read("!H")[0]
        return obj

    def __eq__(self, other):
        return type(self) == type(other) \
            and self.xid == other.xid \
            and self.enabled == other.enabled \
            and self.port_no == other.port_no \
            and self.actor_sys_priority == other.actor_sys_priority \
            and self.actor_sys_mac == other.actor_sys_mac \
            and self.actor_port_priority == other.actor_port_priority \
            and self.actor_port_num == other.actor_port_num \
            and self.actor_key == other.actor_key

    def pretty_print(self, q):
        q.text("bsn_set_lacp_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid != None else 'None')
                q.text(",")
                q.breakable()
                q.text("enabled = ")
                q.text("%#x" % self.enabled)
                q.text(",")
                q.breakable()
                q.text("port_no = ")
                q.text(util.pretty_port(self.port_no))
                q.text(",")
                q.breakable()
                q.text("actor_sys_priority = ")
                q.text("%#x" % self.actor_sys_priority)
                q.text(",")
                q.breakable()
                q.text("actor_sys_mac = ")
                q.text(util.pretty_mac(self.actor_sys_mac))
                q.text(",")
                q.breakable()
                q.text("actor_port_priority = ")
                q.text("%#x" % self.actor_port_priority)
                q.text(",")
                q.breakable()
                q.text("actor_port_num = ")
                q.text("%#x" % self.actor_port_num)
                q.text(",")
                q.breakable()
                q.text("actor_key = ")
                q.text("%#x" % self.actor_key)
            q.breakable()
        q.text('}')
bsn_header.subtypes[41] = bsn_set_lacp_request
class bsn_set_mirroring(bsn_header):
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 3

    def __init__(self, xid=None, report_mirror_ports=None):
        """BSN set-mirroring message (experimenter subtype 3)."""
        self.xid = xid
        self.report_mirror_ports = report_mirror_ports if report_mirror_ports is not None else 0

    def pack(self):
        """Serialize to wire format; length at index 2 is patched last."""
        parts = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder
            struct.pack("!L", self.xid),
            struct.pack("!L", self.experimenter),
            struct.pack("!L", self.subtype),
            struct.pack("!B", self.report_mirror_ports),
            '\x00' * 3,  # pad
        ]
        parts[2] = struct.pack("!H", sum(len(p) for p in parts))
        return ''.join(parts)

    @staticmethod
    def unpack(reader):
        """Deserialize; asserts the header identifies this message type."""
        obj = bsn_set_mirroring()
        assert reader.read("!B")[0] == 4  # version
        assert reader.read("!B")[0] == 4  # type
        _length = reader.read("!H")[0]
        reader = reader.slice(_length - (2 + 2))  # bound reads to this message
        obj.xid = reader.read("!L")[0]
        assert reader.read("!L")[0] == 6035143  # experimenter
        assert reader.read("!L")[0] == 3  # subtype
        obj.report_mirror_ports = reader.read("!B")[0]
        reader.skip(3)  # pad
        return obj

    def __eq__(self, other):
        return type(self) == type(other) \
            and self.xid == other.xid \
            and self.report_mirror_ports == other.report_mirror_ports

    def pretty_print(self, q):
        q.text("bsn_set_mirroring {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid != None else 'None')
                q.text(",")
                q.breakable()
                q.text("report_mirror_ports = ")
                q.text("%#x" % self.report_mirror_ports)
            q.breakable()
        q.text('}')
bsn_header.subtypes[3] = bsn_set_mirroring
class bsn_set_pktin_suppression_reply(bsn_header):
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 25

    def __init__(self, xid=None, status=None):
        """BSN packet-in suppression reply (experimenter subtype 25)."""
        self.xid = xid
        self.status = status if status is not None else 0

    def pack(self):
        """Serialize to wire format; length at index 2 is patched last."""
        parts = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder
            struct.pack("!L", self.xid),
            struct.pack("!L", self.experimenter),
            struct.pack("!L", self.subtype),
            struct.pack("!L", self.status),
        ]
        parts[2] = struct.pack("!H", sum(len(p) for p in parts))
        return ''.join(parts)

    @staticmethod
    def unpack(reader):
        """Deserialize; asserts the header identifies this message type."""
        obj = bsn_set_pktin_suppression_reply()
        assert reader.read("!B")[0] == 4  # version
        assert reader.read("!B")[0] == 4  # type
        _length = reader.read("!H")[0]
        reader = reader.slice(_length - (2 + 2))  # bound reads to this message
        obj.xid = reader.read("!L")[0]
        assert reader.read("!L")[0] == 6035143  # experimenter
        assert reader.read("!L")[0] == 25  # subtype
        obj.status = reader.read("!L")[0]
        return obj

    def __eq__(self, other):
        return type(self) == type(other) \
            and self.xid == other.xid \
            and self.status == other.status

    def pretty_print(self, q):
        q.text("bsn_set_pktin_suppression_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid != None else 'None')
                q.text(",")
                q.breakable()
                q.text("status = ")
                q.text("%#x" % self.status)
            q.breakable()
        q.text('}')
bsn_header.subtypes[25] = bsn_set_pktin_suppression_reply
class bsn_set_pktin_suppression_request(bsn_header):
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 11

    def __init__(self, xid=None, enabled=None, idle_timeout=None, hard_timeout=None, priority=None, cookie=None):
        """BSN packet-in suppression request (experimenter subtype 11)."""
        self.xid = xid
        self.enabled = enabled if enabled is not None else 0
        self.idle_timeout = idle_timeout if idle_timeout is not None else 0
        self.hard_timeout = hard_timeout if hard_timeout is not None else 0
        self.priority = priority if priority is not None else 0
        self.cookie = cookie if cookie is not None else 0

    def pack(self):
        """Serialize to wire format; length at index 2 is patched last."""
        parts = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder
            struct.pack("!L", self.xid),
            struct.pack("!L", self.experimenter),
            struct.pack("!L", self.subtype),
            struct.pack("!B", self.enabled),
            '\x00' * 1,  # pad
            struct.pack("!H", self.idle_timeout),
            struct.pack("!H", self.hard_timeout),
            struct.pack("!H", self.priority),
            struct.pack("!Q", self.cookie),
        ]
        parts[2] = struct.pack("!H", sum(len(p) for p in parts))
        return ''.join(parts)

    @staticmethod
    def unpack(reader):
        """Deserialize; asserts the header identifies this message type."""
        obj = bsn_set_pktin_suppression_request()
        assert reader.read("!B")[0] == 4  # version
        assert reader.read("!B")[0] == 4  # type
        _length = reader.read("!H")[0]
        reader = reader.slice(_length - (2 + 2))  # bound reads to this message
        obj.xid = reader.read("!L")[0]
        assert reader.read("!L")[0] == 6035143  # experimenter
        assert reader.read("!L")[0] == 11  # subtype
        obj.enabled = reader.read("!B")[0]
        reader.skip(1)  # pad
        obj.idle_timeout = reader.read("!H")[0]
        obj.hard_timeout = reader.read("!H")[0]
        obj.priority = reader.read("!H")[0]
        obj.cookie = reader.read("!Q")[0]
        return obj

    def __eq__(self, other):
        return type(self) == type(other) \
            and self.xid == other.xid \
            and self.enabled == other.enabled \
            and self.idle_timeout == other.idle_timeout \
            and self.hard_timeout == other.hard_timeout \
            and self.priority == other.priority \
            and self.cookie == other.cookie

    def pretty_print(self, q):
        q.text("bsn_set_pktin_suppression_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid != None else 'None')
                q.text(",")
                q.breakable()
                q.text("enabled = ")
                q.text("%#x" % self.enabled)
                q.text(",")
                q.breakable()
                q.text("idle_timeout = ")
                q.text("%#x" % self.idle_timeout)
                q.text(",")
                q.breakable()
                q.text("hard_timeout = ")
                q.text("%#x" % self.hard_timeout)
                q.text(",")
                q.breakable()
                q.text("priority = ")
                q.text("%#x" % self.priority)
                q.text(",")
                q.breakable()
                q.text("cookie = ")
                q.text("%#x" % self.cookie)
            q.breakable()
        q.text('}')
bsn_header.subtypes[11] = bsn_set_pktin_suppression_request
class bsn_set_switch_pipeline_reply(bsn_header):
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 54

    def __init__(self, xid=None, status=None):
        """BSN set-switch-pipeline reply (experimenter subtype 54)."""
        self.xid = xid
        self.status = status if status is not None else 0

    def pack(self):
        """Serialize to wire format; length at index 2 is patched last."""
        parts = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder
            struct.pack("!L", self.xid),
            struct.pack("!L", self.experimenter),
            struct.pack("!L", self.subtype),
            struct.pack("!L", self.status),
        ]
        parts[2] = struct.pack("!H", sum(len(p) for p in parts))
        return ''.join(parts)

    @staticmethod
    def unpack(reader):
        """Deserialize; asserts the header identifies this message type."""
        obj = bsn_set_switch_pipeline_reply()
        assert reader.read("!B")[0] == 4  # version
        assert reader.read("!B")[0] == 4  # type
        _length = reader.read("!H")[0]
        reader = reader.slice(_length - (2 + 2))  # bound reads to this message
        obj.xid = reader.read("!L")[0]
        assert reader.read("!L")[0] == 6035143  # experimenter
        assert reader.read("!L")[0] == 54  # subtype
        obj.status = reader.read("!L")[0]
        return obj

    def __eq__(self, other):
        return type(self) == type(other) \
            and self.xid == other.xid \
            and self.status == other.status

    def pretty_print(self, q):
        q.text("bsn_set_switch_pipeline_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid != None else 'None')
                q.text(",")
                q.breakable()
                q.text("status = ")
                q.text("%#x" % self.status)
            q.breakable()
        q.text('}')
bsn_header.subtypes[54] = bsn_set_switch_pipeline_reply
class bsn_set_switch_pipeline_request(bsn_header):
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 53

    def __init__(self, xid=None, pipeline=None):
        """BSN set-switch-pipeline request (experimenter subtype 53)."""
        self.xid = xid
        self.pipeline = pipeline if pipeline is not None else ""

    def pack(self):
        """Serialize to wire format; length at index 2 is patched last."""
        parts = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder
            struct.pack("!L", self.xid),
            struct.pack("!L", self.experimenter),
            struct.pack("!L", self.subtype),
            struct.pack("!256s", self.pipeline),  # fixed-width, NUL-padded
        ]
        parts[2] = struct.pack("!H", sum(len(p) for p in parts))
        return ''.join(parts)

    @staticmethod
    def unpack(reader):
        """Deserialize; asserts the header identifies this message type."""
        obj = bsn_set_switch_pipeline_request()
        assert reader.read("!B")[0] == 4  # version
        assert reader.read("!B")[0] == 4  # type
        _length = reader.read("!H")[0]
        reader = reader.slice(_length - (2 + 2))  # bound reads to this message
        obj.xid = reader.read("!L")[0]
        assert reader.read("!L")[0] == 6035143  # experimenter
        assert reader.read("!L")[0] == 53  # subtype
        # strip the NUL padding added by the fixed-width pack
        obj.pipeline = reader.read("!256s")[0].rstrip("\x00")
        return obj

    def __eq__(self, other):
        return type(self) == type(other) \
            and self.xid == other.xid \
            and self.pipeline == other.pipeline

    def pretty_print(self, q):
        q.text("bsn_set_switch_pipeline_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid != None else 'None')
                q.text(",")
                q.breakable()
                q.text("pipeline = ")
                q.pp(self.pipeline)
            q.breakable()
        q.text('}')
bsn_header.subtypes[53] = bsn_set_switch_pipeline_request
class bsn_switch_pipeline_stats_reply(bsn_stats_reply):
    version = 4
    type = 19
    stats_type = 65535
    experimenter = 6035143
    subtype = 6

    def __init__(self, xid=None, flags=None, entries=None):
        """BSN switch-pipeline stats reply (experimenter stats subtype 6)."""
        self.xid = xid
        self.flags = flags if flags is not None else 0
        self.entries = entries if entries is not None else []

    def pack(self):
        """Serialize to wire format; length at index 2 is patched last."""
        parts = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder
            struct.pack("!L", self.xid),
            struct.pack("!H", self.stats_type),
            struct.pack("!H", self.flags),
            '\x00' * 4,  # pad
            struct.pack("!L", self.experimenter),
            struct.pack("!L", self.subtype),
            loxi.generic_util.pack_list(self.entries),
        ]
        parts[2] = struct.pack("!H", sum(len(p) for p in parts))
        return ''.join(parts)

    @staticmethod
    def unpack(reader):
        """Deserialize; asserts the header identifies this message type."""
        obj = bsn_switch_pipeline_stats_reply()
        assert reader.read("!B")[0] == 4  # version
        assert reader.read("!B")[0] == 19  # type
        _length = reader.read("!H")[0]
        reader = reader.slice(_length - (2 + 2))  # bound reads to this message
        obj.xid = reader.read("!L")[0]
        assert reader.read("!H")[0] == 65535  # stats_type
        obj.flags = reader.read("!H")[0]
        reader.skip(4)  # pad
        assert reader.read("!L")[0] == 6035143  # experimenter
        assert reader.read("!L")[0] == 6  # subtype
        obj.entries = loxi.generic_util.unpack_list(reader, common.bsn_switch_pipeline_stats_entry.unpack)
        return obj

    def __eq__(self, other):
        return type(self) == type(other) \
            and self.xid == other.xid \
            and self.flags == other.flags \
            and self.entries == other.entries

    def pretty_print(self, q):
        q.text("bsn_switch_pipeline_stats_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid != None else 'None')
                q.text(",")
                q.breakable()
                q.text("flags = ")
                q.text("%#x" % self.flags)
                q.text(",")
                q.breakable()
                q.text("entries = ")
                q.pp(self.entries)
            q.breakable()
        q.text('}')
bsn_stats_reply.subtypes[6] = bsn_switch_pipeline_stats_reply
class bsn_switch_pipeline_stats_request(bsn_stats_request):
    version = 4
    type = 18
    stats_type = 65535
    experimenter = 6035143
    subtype = 6

    def __init__(self, xid=None, flags=None):
        """BSN switch-pipeline stats request (experimenter stats subtype 6)."""
        self.xid = xid
        self.flags = flags if flags is not None else 0

    def pack(self):
        """Serialize to wire format; length at index 2 is patched last."""
        parts = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder
            struct.pack("!L", self.xid),
            struct.pack("!H", self.stats_type),
            struct.pack("!H", self.flags),
            '\x00' * 4,  # pad
            struct.pack("!L", self.experimenter),
            struct.pack("!L", self.subtype),
        ]
        parts[2] = struct.pack("!H", sum(len(p) for p in parts))
        return ''.join(parts)

    @staticmethod
    def unpack(reader):
        """Deserialize; asserts the header identifies this message type."""
        obj = bsn_switch_pipeline_stats_request()
        assert reader.read("!B")[0] == 4  # version
        assert reader.read("!B")[0] == 18  # type
        _length = reader.read("!H")[0]
        reader = reader.slice(_length - (2 + 2))  # bound reads to this message
        obj.xid = reader.read("!L")[0]
        assert reader.read("!H")[0] == 65535  # stats_type
        obj.flags = reader.read("!H")[0]
        reader.skip(4)  # pad
        assert reader.read("!L")[0] == 6035143  # experimenter
        assert reader.read("!L")[0] == 6  # subtype
        return obj

    def __eq__(self, other):
        return type(self) == type(other) \
            and self.xid == other.xid \
            and self.flags == other.flags

    def pretty_print(self, q):
        q.text("bsn_switch_pipeline_stats_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid != None else 'None')
                q.text(",")
                q.breakable()
                q.text("flags = ")
                q.text("%#x" % self.flags)
            q.breakable()
        q.text('}')
bsn_stats_request.subtypes[6] = bsn_switch_pipeline_stats_request
class bsn_table_checksum_stats_reply(bsn_stats_reply):
    version = 4
    type = 19
    stats_type = 65535
    experimenter = 6035143
    subtype = 11

    def __init__(self, xid=None, flags=None, entries=None):
        """BSN table-checksum stats reply (experimenter stats subtype 11)."""
        self.xid = xid
        self.flags = flags if flags is not None else 0
        self.entries = entries if entries is not None else []

    def pack(self):
        """Serialize to wire format; length at index 2 is patched last."""
        parts = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder
            struct.pack("!L", self.xid),
            struct.pack("!H", self.stats_type),
            struct.pack("!H", self.flags),
            '\x00' * 4,  # pad
            struct.pack("!L", self.experimenter),
            struct.pack("!L", self.subtype),
            loxi.generic_util.pack_list(self.entries),
        ]
        parts[2] = struct.pack("!H", sum(len(p) for p in parts))
        return ''.join(parts)

    @staticmethod
    def unpack(reader):
        """Deserialize; asserts the header identifies this message type."""
        obj = bsn_table_checksum_stats_reply()
        assert reader.read("!B")[0] == 4  # version
        assert reader.read("!B")[0] == 19  # type
        _length = reader.read("!H")[0]
        reader = reader.slice(_length - (2 + 2))  # bound reads to this message
        obj.xid = reader.read("!L")[0]
        assert reader.read("!H")[0] == 65535  # stats_type
        obj.flags = reader.read("!H")[0]
        reader.skip(4)  # pad
        assert reader.read("!L")[0] == 6035143  # experimenter
        assert reader.read("!L")[0] == 11  # subtype
        obj.entries = loxi.generic_util.unpack_list(reader, common.bsn_table_checksum_stats_entry.unpack)
        return obj

    def __eq__(self, other):
        return type(self) == type(other) \
            and self.xid == other.xid \
            and self.flags == other.flags \
            and self.entries == other.entries

    def pretty_print(self, q):
        q.text("bsn_table_checksum_stats_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid != None else 'None')
                q.text(",")
                q.breakable()
                q.text("flags = ")
                q.text("%#x" % self.flags)
                q.text(",")
                q.breakable()
                q.text("entries = ")
                q.pp(self.entries)
            q.breakable()
        q.text('}')
bsn_stats_reply.subtypes[11] = bsn_table_checksum_stats_reply
class bsn_table_checksum_stats_request(bsn_stats_request):
    version = 4
    type = 18
    stats_type = 65535
    experimenter = 6035143
    subtype = 11

    def __init__(self, xid=None, flags=None):
        """BSN table-checksum stats request (experimenter stats subtype 11)."""
        self.xid = xid
        self.flags = flags if flags is not None else 0

    def pack(self):
        """Serialize to wire format; length at index 2 is patched last."""
        parts = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder
            struct.pack("!L", self.xid),
            struct.pack("!H", self.stats_type),
            struct.pack("!H", self.flags),
            '\x00' * 4,  # pad
            struct.pack("!L", self.experimenter),
            struct.pack("!L", self.subtype),
        ]
        parts[2] = struct.pack("!H", sum(len(p) for p in parts))
        return ''.join(parts)

    @staticmethod
    def unpack(reader):
        """Deserialize; asserts the header identifies this message type."""
        obj = bsn_table_checksum_stats_request()
        assert reader.read("!B")[0] == 4  # version
        assert reader.read("!B")[0] == 18  # type
        _length = reader.read("!H")[0]
        reader = reader.slice(_length - (2 + 2))  # bound reads to this message
        obj.xid = reader.read("!L")[0]
        assert reader.read("!H")[0] == 65535  # stats_type
        obj.flags = reader.read("!H")[0]
        reader.skip(4)  # pad
        assert reader.read("!L")[0] == 6035143  # experimenter
        assert reader.read("!L")[0] == 11  # subtype
        return obj

    def __eq__(self, other):
        return type(self) == type(other) \
            and self.xid == other.xid \
            and self.flags == other.flags

    def pretty_print(self, q):
        q.text("bsn_table_checksum_stats_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid != None else 'None')
                q.text(",")
                q.breakable()
                q.text("flags = ")
                q.text("%#x" % self.flags)
            q.breakable()
        q.text('}')
bsn_stats_request.subtypes[11] = bsn_table_checksum_stats_request
class bsn_table_set_buckets_size(bsn_header):
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 61

    def __init__(self, xid=None, table_id=None, buckets_size=None):
        """BSN table-set-buckets-size message (experimenter subtype 61)."""
        self.xid = xid
        self.table_id = table_id if table_id is not None else 0
        self.buckets_size = buckets_size if buckets_size is not None else 0

    def pack(self):
        """Serialize to wire format; length at index 2 is patched last."""
        parts = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder
            struct.pack("!L", self.xid),
            struct.pack("!L", self.experimenter),
            struct.pack("!L", self.subtype),
            struct.pack("!H", self.table_id),
            '\x00' * 2,  # pad
            struct.pack("!L", self.buckets_size),
        ]
        parts[2] = struct.pack("!H", sum(len(p) for p in parts))
        return ''.join(parts)

    @staticmethod
    def unpack(reader):
        """Deserialize; asserts the header identifies this message type."""
        obj = bsn_table_set_buckets_size()
        assert reader.read("!B")[0] == 4  # version
        assert reader.read("!B")[0] == 4  # type
        _length = reader.read("!H")[0]
        reader = reader.slice(_length - (2 + 2))  # bound reads to this message
        obj.xid = reader.read("!L")[0]
        assert reader.read("!L")[0] == 6035143  # experimenter
        assert reader.read("!L")[0] == 61  # subtype
        obj.table_id = reader.read("!H")[0]
        reader.skip(2)  # pad
        obj.buckets_size = reader.read("!L")[0]
        return obj

    def __eq__(self, other):
        return type(self) == type(other) \
            and self.xid == other.xid \
            and self.table_id == other.table_id \
            and self.buckets_size == other.buckets_size

    def pretty_print(self, q):
        q.text("bsn_table_set_buckets_size {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid != None else 'None')
                q.text(",")
                q.breakable()
                q.text("table_id = ")
                q.text("%#x" % self.table_id)
                q.text(",")
                q.breakable()
                q.text("buckets_size = ")
                q.text("%#x" % self.buckets_size)
            q.breakable()
        q.text('}')
bsn_header.subtypes[61] = bsn_table_set_buckets_size
class bsn_time_reply(bsn_header):
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 45

    def __init__(self, xid=None, time_ms=None):
        """BSN time reply (experimenter subtype 45)."""
        self.xid = xid
        self.time_ms = time_ms if time_ms is not None else 0

    def pack(self):
        """Serialize to wire format; length at index 2 is patched last."""
        parts = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder
            struct.pack("!L", self.xid),
            struct.pack("!L", self.experimenter),
            struct.pack("!L", self.subtype),
            struct.pack("!Q", self.time_ms),
        ]
        parts[2] = struct.pack("!H", sum(len(p) for p in parts))
        return ''.join(parts)

    @staticmethod
    def unpack(reader):
        """Deserialize; asserts the header identifies this message type."""
        obj = bsn_time_reply()
        assert reader.read("!B")[0] == 4  # version
        assert reader.read("!B")[0] == 4  # type
        _length = reader.read("!H")[0]
        reader = reader.slice(_length - (2 + 2))  # bound reads to this message
        obj.xid = reader.read("!L")[0]
        assert reader.read("!L")[0] == 6035143  # experimenter
        assert reader.read("!L")[0] == 45  # subtype
        obj.time_ms = reader.read("!Q")[0]
        return obj

    def __eq__(self, other):
        return type(self) == type(other) \
            and self.xid == other.xid \
            and self.time_ms == other.time_ms

    def pretty_print(self, q):
        q.text("bsn_time_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid != None else 'None')
                q.text(",")
                q.breakable()
                q.text("time_ms = ")
                q.text("%#x" % self.time_ms)
            q.breakable()
        q.text('}')
bsn_header.subtypes[45] = bsn_time_reply
class bsn_time_request(bsn_header):
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 44

    def __init__(self, xid=None):
        """BSN time request (experimenter subtype 44); carries no payload."""
        self.xid = xid

    def pack(self):
        """Serialize to wire format; length at index 2 is patched last."""
        parts = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder
            struct.pack("!L", self.xid),
            struct.pack("!L", self.experimenter),
            struct.pack("!L", self.subtype),
        ]
        parts[2] = struct.pack("!H", sum(len(p) for p in parts))
        return ''.join(parts)

    @staticmethod
    def unpack(reader):
        """Deserialize; asserts the header identifies this message type."""
        obj = bsn_time_request()
        assert reader.read("!B")[0] == 4  # version
        assert reader.read("!B")[0] == 4  # type
        _length = reader.read("!H")[0]
        reader = reader.slice(_length - (2 + 2))  # bound reads to this message
        obj.xid = reader.read("!L")[0]
        assert reader.read("!L")[0] == 6035143  # experimenter
        assert reader.read("!L")[0] == 44  # subtype
        return obj

    def __eq__(self, other):
        return type(self) == type(other) and self.xid == other.xid

    def pretty_print(self, q):
        q.text("bsn_time_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid != None else 'None')
            q.breakable()
        q.text('}')
bsn_header.subtypes[44] = bsn_time_request
class bsn_virtual_port_create_reply(bsn_header):
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 16

    def __init__(self, xid=None, status=None, vport_no=None):
        """BSN virtual-port create reply (experimenter subtype 16)."""
        self.xid = xid
        self.status = status if status is not None else 0
        self.vport_no = vport_no if vport_no is not None else 0

    def pack(self):
        """Serialize to wire format; length at index 2 is patched last."""
        parts = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder
            struct.pack("!L", self.xid),
            struct.pack("!L", self.experimenter),
            struct.pack("!L", self.subtype),
            struct.pack("!L", self.status),
            struct.pack("!L", self.vport_no),
        ]
        parts[2] = struct.pack("!H", sum(len(p) for p in parts))
        return ''.join(parts)

    @staticmethod
    def unpack(reader):
        """Deserialize; asserts the header identifies this message type."""
        obj = bsn_virtual_port_create_reply()
        assert reader.read("!B")[0] == 4  # version
        assert reader.read("!B")[0] == 4  # type
        _length = reader.read("!H")[0]
        reader = reader.slice(_length - (2 + 2))  # bound reads to this message
        obj.xid = reader.read("!L")[0]
        assert reader.read("!L")[0] == 6035143  # experimenter
        assert reader.read("!L")[0] == 16  # subtype
        obj.status = reader.read("!L")[0]
        obj.vport_no = reader.read("!L")[0]
        return obj

    def __eq__(self, other):
        return type(self) == type(other) \
            and self.xid == other.xid \
            and self.status == other.status \
            and self.vport_no == other.vport_no

    def pretty_print(self, q):
        q.text("bsn_virtual_port_create_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid != None else 'None')
                q.text(",")
                q.breakable()
                q.text("status = ")
                q.text("%#x" % self.status)
                q.text(",")
                q.breakable()
                q.text("vport_no = ")
                q.text("%#x" % self.vport_no)
            q.breakable()
        q.text('}')
bsn_header.subtypes[16] = bsn_virtual_port_create_reply
class bsn_virtual_port_create_request(bsn_header):
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 15

    def __init__(self, xid=None, vport=None):
        """BSN virtual-port create request (experimenter subtype 15)."""
        self.xid = xid
        # default is a fresh q-in-q vport, built per instance
        self.vport = vport if vport is not None else common.bsn_vport_q_in_q()

    def pack(self):
        """Serialize to wire format; length at index 2 is patched last."""
        parts = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder
            struct.pack("!L", self.xid),
            struct.pack("!L", self.experimenter),
            struct.pack("!L", self.subtype),
            self.vport.pack(),
        ]
        parts[2] = struct.pack("!H", sum(len(p) for p in parts))
        return ''.join(parts)

    @staticmethod
    def unpack(reader):
        """Deserialize; asserts the header identifies this message type."""
        obj = bsn_virtual_port_create_request()
        assert reader.read("!B")[0] == 4  # version
        assert reader.read("!B")[0] == 4  # type
        _length = reader.read("!H")[0]
        reader = reader.slice(_length - (2 + 2))  # bound reads to this message
        obj.xid = reader.read("!L")[0]
        assert reader.read("!L")[0] == 6035143  # experimenter
        assert reader.read("!L")[0] == 15  # subtype
        obj.vport = common.bsn_vport_q_in_q.unpack(reader)
        return obj

    def __eq__(self, other):
        return type(self) == type(other) \
            and self.xid == other.xid \
            and self.vport == other.vport

    def pretty_print(self, q):
        q.text("bsn_virtual_port_create_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid != None else 'None')
                q.text(",")
                q.breakable()
                q.text("vport = ")
                q.pp(self.vport)
            q.breakable()
        q.text('}')
bsn_header.subtypes[15] = bsn_virtual_port_create_request
class bsn_virtual_port_remove_reply(bsn_header):
    """BSN experimenter reply to a virtual-port remove request (subtype 26)."""
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 26
    def __init__(self, xid=None, status=None):
        self.xid = xid
        self.status = status if status != None else 0
    def pack(self):
        """Serialize to a wire-format string; length field is back-patched."""
        out = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder, patched below
            struct.pack("!L", self.xid),
            struct.pack("!L", self.experimenter),
            struct.pack("!L", self.subtype),
            struct.pack("!L", self.status),
        ]
        out[2] = struct.pack("!H", sum(len(piece) for piece in out))
        return ''.join(out)
    @staticmethod
    def unpack(reader):
        """Deserialize from *reader*; leaves it positioned past this message."""
        msg = bsn_virtual_port_remove_reply()
        hdr_version = reader.read("!B")[0]
        assert(hdr_version == 4)
        hdr_type = reader.read("!B")[0]
        assert(hdr_type == 4)
        hdr_length = reader.read("!H")[0]
        # Constrain reads to this message's remaining bytes.
        reader = reader.slice(hdr_length - (2 + 2))
        msg.xid = reader.read("!L")[0]
        experimenter = reader.read("!L")[0]
        assert(experimenter == 6035143)
        subtype = reader.read("!L")[0]
        assert(subtype == 26)
        msg.status = reader.read("!L")[0]
        return msg
    def __eq__(self, other):
        """Equal iff *other* is the same class with identical fields."""
        if type(self) != type(other):
            return False
        if self.xid != other.xid:
            return False
        if self.status != other.status:
            return False
        return True
    def pretty_print(self, q):
        """Render this message onto pretty-printer *q*."""
        q.text("bsn_virtual_port_remove_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid != None else 'None')
                q.text(",")
                q.breakable()
                q.text("status = ")
                q.text("%#x" % self.status)
                q.breakable()
        q.text('}')
bsn_header.subtypes[26] = bsn_virtual_port_remove_reply
class bsn_virtual_port_remove_request(bsn_header):
    """BSN experimenter message asking the switch to remove a virtual port."""
    version = 4
    type = 4
    experimenter = 6035143
    subtype = 17
    def __init__(self, xid=None, vport_no=None):
        self.xid = xid
        self.vport_no = vport_no if vport_no != None else 0
    def pack(self):
        """Serialize to a wire-format string; length field is back-patched."""
        out = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder, patched below
            struct.pack("!L", self.xid),
            struct.pack("!L", self.experimenter),
            struct.pack("!L", self.subtype),
            struct.pack("!L", self.vport_no),
        ]
        out[2] = struct.pack("!H", sum(len(piece) for piece in out))
        return ''.join(out)
    @staticmethod
    def unpack(reader):
        """Deserialize from *reader*; leaves it positioned past this message."""
        msg = bsn_virtual_port_remove_request()
        hdr_version = reader.read("!B")[0]
        assert(hdr_version == 4)
        hdr_type = reader.read("!B")[0]
        assert(hdr_type == 4)
        hdr_length = reader.read("!H")[0]
        # Constrain reads to this message's remaining bytes.
        reader = reader.slice(hdr_length - (2 + 2))
        msg.xid = reader.read("!L")[0]
        experimenter = reader.read("!L")[0]
        assert(experimenter == 6035143)
        subtype = reader.read("!L")[0]
        assert(subtype == 17)
        msg.vport_no = reader.read("!L")[0]
        return msg
    def __eq__(self, other):
        """Equal iff *other* is the same class with identical fields."""
        if type(self) != type(other):
            return False
        if self.xid != other.xid:
            return False
        if self.vport_no != other.vport_no:
            return False
        return True
    def pretty_print(self, q):
        """Render this message onto pretty-printer *q*."""
        q.text("bsn_virtual_port_remove_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid != None else 'None')
                q.text(",")
                q.breakable()
                q.text("vport_no = ")
                q.text("%#x" % self.vport_no)
                q.breakable()
        q.text('}')
bsn_header.subtypes[17] = bsn_virtual_port_remove_request
class bsn_vlan_counter_stats_reply(bsn_stats_reply):
    """BSN experimenter multipart reply carrying per-VLAN counter entries (subtype 9)."""
    version = 4
    type = 19
    stats_type = 65535
    experimenter = 6035143
    subtype = 9
    def __init__(self, xid=None, flags=None, entries=None):
        """Create a reply; omitted fields default to None/0/empty list."""
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if flags != None:
            self.flags = flags
        else:
            self.flags = 0
        if entries != None:
            self.entries = entries
        else:
            self.entries = []
        return
    def pack(self):
        """Serialize to wire format; the 16-bit length at offset 2 is back-patched."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.stats_type))
        packed.append(struct.pack("!H", self.flags))
        packed.append('\x00' * 4)
        packed.append(struct.pack("!L", self.experimenter))
        packed.append(struct.pack("!L", self.subtype))
        packed.append(loxi.generic_util.pack_list(self.entries))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Deserialize from *reader*; asserts the fixed header/experimenter fields."""
        obj = bsn_vlan_counter_stats_reply()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 19)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Constrain reads to this message: total length minus the 4 header
        # bytes (version, type, length) already consumed.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _stats_type = reader.read("!H")[0]
        assert(_stats_type == 65535)
        obj.flags = reader.read("!H")[0]
        reader.skip(4)
        _experimenter = reader.read("!L")[0]
        assert(_experimenter == 6035143)
        _subtype = reader.read("!L")[0]
        assert(_subtype == 9)
        # Remaining payload is a list of per-VLAN counter entries.
        obj.entries = loxi.generic_util.unpack_list(reader, common.bsn_vlan_counter_stats_entry.unpack)
        return obj
    def __eq__(self, other):
        """Equal iff *other* is the same class with identical fields."""
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.flags != other.flags: return False
        if self.entries != other.entries: return False
        return True
    def pretty_print(self, q):
        """Render this message onto pretty-printer *q*."""
        q.text("bsn_vlan_counter_stats_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("flags = ");
                q.text("%#x" % self.flags)
                q.text(","); q.breakable()
                q.text("entries = ");
                q.pp(self.entries)
                q.breakable()
        q.text('}')
# Register with the experimenter-stats dispatcher.
bsn_stats_reply.subtypes[9] = bsn_vlan_counter_stats_reply
class bsn_vlan_counter_stats_request(bsn_stats_request):
    """BSN experimenter multipart request for counters of one VLAN (subtype 9)."""
    version = 4
    type = 18
    stats_type = 65535
    experimenter = 6035143
    subtype = 9
    def __init__(self, xid=None, flags=None, vlan_vid=None):
        """Create a request; omitted fields default to None/0."""
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if flags != None:
            self.flags = flags
        else:
            self.flags = 0
        if vlan_vid != None:
            self.vlan_vid = vlan_vid
        else:
            self.vlan_vid = 0
        return
    def pack(self):
        """Serialize to wire format; the 16-bit length at offset 2 is back-patched."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.stats_type))
        packed.append(struct.pack("!H", self.flags))
        packed.append('\x00' * 4)
        packed.append(struct.pack("!L", self.experimenter))
        packed.append(struct.pack("!L", self.subtype))
        packed.append(struct.pack("!H", self.vlan_vid))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Deserialize from *reader*; asserts the fixed header/experimenter fields."""
        obj = bsn_vlan_counter_stats_request()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 18)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Constrain reads to this message: total length minus the 4 header
        # bytes already consumed.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _stats_type = reader.read("!H")[0]
        assert(_stats_type == 65535)
        obj.flags = reader.read("!H")[0]
        reader.skip(4)
        _experimenter = reader.read("!L")[0]
        assert(_experimenter == 6035143)
        _subtype = reader.read("!L")[0]
        assert(_subtype == 9)
        obj.vlan_vid = reader.read("!H")[0]
        return obj
    def __eq__(self, other):
        """Equal iff *other* is the same class with identical fields."""
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.flags != other.flags: return False
        if self.vlan_vid != other.vlan_vid: return False
        return True
    def pretty_print(self, q):
        """Render this message onto pretty-printer *q*."""
        q.text("bsn_vlan_counter_stats_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("flags = ");
                q.text("%#x" % self.flags)
                q.text(","); q.breakable()
                q.text("vlan_vid = ");
                q.text("%#x" % self.vlan_vid)
                q.breakable()
        q.text('}')
# Register with the experimenter-stats dispatcher.
bsn_stats_request.subtypes[9] = bsn_vlan_counter_stats_request
class desc_stats_reply(stats_reply):
    """OpenFlow 1.3 OFPMP_DESC multipart reply: switch description strings.

    All description fields are fixed-width, NUL-padded ASCII on the wire.
    """
    version = 4
    type = 19
    stats_type = 0
    def __init__(self, xid=None, flags=None, mfr_desc=None, hw_desc=None, sw_desc=None, serial_num=None, dp_desc=None):
        """Create a reply; omitted string fields default to the empty string."""
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if flags != None:
            self.flags = flags
        else:
            self.flags = 0
        if mfr_desc != None:
            self.mfr_desc = mfr_desc
        else:
            self.mfr_desc = ""
        if hw_desc != None:
            self.hw_desc = hw_desc
        else:
            self.hw_desc = ""
        if sw_desc != None:
            self.sw_desc = sw_desc
        else:
            self.sw_desc = ""
        if serial_num != None:
            self.serial_num = serial_num
        else:
            self.serial_num = ""
        if dp_desc != None:
            self.dp_desc = dp_desc
        else:
            self.dp_desc = ""
        return
    def pack(self):
        """Serialize to wire format; strings are NUL-padded to their fixed widths."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.stats_type))
        packed.append(struct.pack("!H", self.flags))
        packed.append('\x00' * 4)
        packed.append(struct.pack("!256s", self.mfr_desc))
        packed.append(struct.pack("!256s", self.hw_desc))
        packed.append(struct.pack("!256s", self.sw_desc))
        packed.append(struct.pack("!32s", self.serial_num))
        packed.append(struct.pack("!256s", self.dp_desc))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Deserialize from *reader*; trailing NUL padding is stripped from strings."""
        obj = desc_stats_reply()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 19)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Constrain reads to this message: total length minus the 4 header
        # bytes already consumed.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _stats_type = reader.read("!H")[0]
        assert(_stats_type == 0)
        obj.flags = reader.read("!H")[0]
        reader.skip(4)
        obj.mfr_desc = reader.read("!256s")[0].rstrip("\x00")
        obj.hw_desc = reader.read("!256s")[0].rstrip("\x00")
        obj.sw_desc = reader.read("!256s")[0].rstrip("\x00")
        obj.serial_num = reader.read("!32s")[0].rstrip("\x00")
        obj.dp_desc = reader.read("!256s")[0].rstrip("\x00")
        return obj
    def __eq__(self, other):
        """Equal iff *other* is the same class with identical fields."""
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.flags != other.flags: return False
        if self.mfr_desc != other.mfr_desc: return False
        if self.hw_desc != other.hw_desc: return False
        if self.sw_desc != other.sw_desc: return False
        if self.serial_num != other.serial_num: return False
        if self.dp_desc != other.dp_desc: return False
        return True
    def pretty_print(self, q):
        """Render this message onto pretty-printer *q*."""
        q.text("desc_stats_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("flags = ");
                q.text("%#x" % self.flags)
                q.text(","); q.breakable()
                q.text("mfr_desc = ");
                q.pp(self.mfr_desc)
                q.text(","); q.breakable()
                q.text("hw_desc = ");
                q.pp(self.hw_desc)
                q.text(","); q.breakable()
                q.text("sw_desc = ");
                q.pp(self.sw_desc)
                q.text(","); q.breakable()
                q.text("serial_num = ");
                q.pp(self.serial_num)
                q.text(","); q.breakable()
                q.text("dp_desc = ");
                q.pp(self.dp_desc)
                q.breakable()
        q.text('}')
# Register with the stats-reply dispatcher.
stats_reply.subtypes[0] = desc_stats_reply
class desc_stats_request(stats_request):
    """OpenFlow 1.3 OFPMP_DESC multipart request (no body beyond the header)."""
    version = 4
    type = 18
    stats_type = 0
    def __init__(self, xid=None, flags=None):
        self.xid = xid
        self.flags = flags if flags != None else 0
    def pack(self):
        """Serialize to a wire-format string; length field is back-patched."""
        out = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder, patched below
            struct.pack("!L", self.xid),
            struct.pack("!H", self.stats_type),
            struct.pack("!H", self.flags),
            '\x00' * 4,  # pad
        ]
        out[2] = struct.pack("!H", sum(len(piece) for piece in out))
        return ''.join(out)
    @staticmethod
    def unpack(reader):
        """Deserialize from *reader*; leaves it positioned past this message."""
        msg = desc_stats_request()
        hdr_version = reader.read("!B")[0]
        assert(hdr_version == 4)
        hdr_type = reader.read("!B")[0]
        assert(hdr_type == 18)
        hdr_length = reader.read("!H")[0]
        # Constrain reads to this message's remaining bytes.
        reader = reader.slice(hdr_length - (2 + 2))
        msg.xid = reader.read("!L")[0]
        stats_type = reader.read("!H")[0]
        assert(stats_type == 0)
        msg.flags = reader.read("!H")[0]
        reader.skip(4)  # pad
        return msg
    def __eq__(self, other):
        """Equal iff *other* is the same class with identical fields."""
        if type(self) != type(other):
            return False
        if self.xid != other.xid:
            return False
        if self.flags != other.flags:
            return False
        return True
    def pretty_print(self, q):
        """Render this message onto pretty-printer *q*."""
        q.text("desc_stats_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid != None else 'None')
                q.text(",")
                q.breakable()
                q.text("flags = ")
                q.text("%#x" % self.flags)
                q.breakable()
        q.text('}')
stats_request.subtypes[0] = desc_stats_request
class echo_reply(message):
    """OpenFlow 1.3 OFPT_ECHO_REPLY; echoes back arbitrary request data."""
    version = 4
    type = 3
    def __init__(self, xid=None, data=None):
        self.xid = xid
        self.data = data if data != None else ''
    def pack(self):
        """Serialize to a wire-format string; length field is back-patched."""
        out = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder, patched below
            struct.pack("!L", self.xid),
            self.data,
        ]
        out[2] = struct.pack("!H", sum(len(piece) for piece in out))
        return ''.join(out)
    @staticmethod
    def unpack(reader):
        """Deserialize from *reader*; everything after the header is payload."""
        msg = echo_reply()
        hdr_version = reader.read("!B")[0]
        assert(hdr_version == 4)
        hdr_type = reader.read("!B")[0]
        assert(hdr_type == 3)
        hdr_length = reader.read("!H")[0]
        # Constrain reads to this message's remaining bytes.
        reader = reader.slice(hdr_length - (2 + 2))
        msg.xid = reader.read("!L")[0]
        msg.data = str(reader.read_all())
        return msg
    def __eq__(self, other):
        """Equal iff *other* is the same class with identical fields."""
        if type(self) != type(other):
            return False
        if self.xid != other.xid:
            return False
        if self.data != other.data:
            return False
        return True
    def pretty_print(self, q):
        """Render this message onto pretty-printer *q*."""
        q.text("echo_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid != None else 'None')
                q.text(",")
                q.breakable()
                q.text("data = ")
                q.pp(self.data)
                q.breakable()
        q.text('}')
message.subtypes[3] = echo_reply
class echo_request(message):
    """OpenFlow 1.3 OFPT_ECHO_REQUEST with an arbitrary opaque payload."""
    version = 4
    type = 2
    def __init__(self, xid=None, data=None):
        self.xid = xid
        self.data = data if data != None else ''
    def pack(self):
        """Serialize to a wire-format string; length field is back-patched."""
        out = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder, patched below
            struct.pack("!L", self.xid),
            self.data,
        ]
        out[2] = struct.pack("!H", sum(len(piece) for piece in out))
        return ''.join(out)
    @staticmethod
    def unpack(reader):
        """Deserialize from *reader*; everything after the header is payload."""
        msg = echo_request()
        hdr_version = reader.read("!B")[0]
        assert(hdr_version == 4)
        hdr_type = reader.read("!B")[0]
        assert(hdr_type == 2)
        hdr_length = reader.read("!H")[0]
        # Constrain reads to this message's remaining bytes.
        reader = reader.slice(hdr_length - (2 + 2))
        msg.xid = reader.read("!L")[0]
        msg.data = str(reader.read_all())
        return msg
    def __eq__(self, other):
        """Equal iff *other* is the same class with identical fields."""
        if type(self) != type(other):
            return False
        if self.xid != other.xid:
            return False
        if self.data != other.data:
            return False
        return True
    def pretty_print(self, q):
        """Render this message onto pretty-printer *q*."""
        q.text("echo_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid != None else 'None')
                q.text(",")
                q.breakable()
                q.text("data = ")
                q.pp(self.data)
                q.breakable()
        q.text('}')
message.subtypes[2] = echo_request
class experimenter_error_msg(error_msg):
    """OpenFlow 1.3 OFPET_EXPERIMENTER error: vendor-defined subtype plus opaque data."""
    version = 4
    type = 1
    err_type = 65535
    def __init__(self, xid=None, subtype=None, experimenter=None, data=None):
        """Create an error message; omitted fields default to None/0/empty string."""
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if subtype != None:
            self.subtype = subtype
        else:
            self.subtype = 0
        if experimenter != None:
            self.experimenter = experimenter
        else:
            self.experimenter = 0
        if data != None:
            self.data = data
        else:
            self.data = ''
        return
    def pack(self):
        """Serialize to wire format; the 16-bit length at offset 2 is back-patched."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.err_type))
        packed.append(struct.pack("!H", self.subtype))
        packed.append(struct.pack("!L", self.experimenter))
        packed.append(self.data)
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Deserialize from *reader*; the tail of the message becomes obj.data."""
        obj = experimenter_error_msg()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 1)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Constrain reads to this message: total length minus the 4 header
        # bytes already consumed.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _err_type = reader.read("!H")[0]
        assert(_err_type == 65535)
        obj.subtype = reader.read("!H")[0]
        obj.experimenter = reader.read("!L")[0]
        obj.data = str(reader.read_all())
        return obj
    def __eq__(self, other):
        """Equal iff *other* is the same class with identical fields."""
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.subtype != other.subtype: return False
        if self.experimenter != other.experimenter: return False
        if self.data != other.data: return False
        return True
    def pretty_print(self, q):
        """Render this message onto pretty-printer *q*."""
        q.text("experimenter_error_msg {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("subtype = ");
                q.text("%#x" % self.subtype)
                q.text(","); q.breakable()
                q.text("experimenter = ");
                q.text("%#x" % self.experimenter)
                q.text(","); q.breakable()
                q.text("data = ");
                q.pp(self.data)
                q.breakable()
        q.text('}')
# Register with the error-message dispatcher.
error_msg.subtypes[65535] = experimenter_error_msg
class features_reply(message):
    """OpenFlow 1.3 OFPT_FEATURES_REPLY: datapath identity and capabilities."""
    version = 4
    type = 6
    def __init__(self, xid=None, datapath_id=None, n_buffers=None, n_tables=None, auxiliary_id=None, capabilities=None, reserved=None):
        """Create a reply; omitted fields default to None/0."""
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if datapath_id != None:
            self.datapath_id = datapath_id
        else:
            self.datapath_id = 0
        if n_buffers != None:
            self.n_buffers = n_buffers
        else:
            self.n_buffers = 0
        if n_tables != None:
            self.n_tables = n_tables
        else:
            self.n_tables = 0
        if auxiliary_id != None:
            self.auxiliary_id = auxiliary_id
        else:
            self.auxiliary_id = 0
        if capabilities != None:
            self.capabilities = capabilities
        else:
            self.capabilities = 0
        if reserved != None:
            self.reserved = reserved
        else:
            self.reserved = 0
        return
    def pack(self):
        """Serialize to wire format; the 16-bit length at offset 2 is back-patched."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!Q", self.datapath_id))
        packed.append(struct.pack("!L", self.n_buffers))
        packed.append(struct.pack("!B", self.n_tables))
        packed.append(struct.pack("!B", self.auxiliary_id))
        packed.append('\x00' * 2)
        packed.append(struct.pack("!L", self.capabilities))
        packed.append(struct.pack("!L", self.reserved))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Deserialize from *reader*; asserts the fixed version/type bytes."""
        obj = features_reply()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 6)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Constrain reads to this message: total length minus the 4 header
        # bytes already consumed.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        obj.datapath_id = reader.read("!Q")[0]
        obj.n_buffers = reader.read("!L")[0]
        obj.n_tables = reader.read("!B")[0]
        obj.auxiliary_id = reader.read("!B")[0]
        reader.skip(2)
        obj.capabilities = reader.read("!L")[0]
        obj.reserved = reader.read("!L")[0]
        return obj
    def __eq__(self, other):
        """Equal iff *other* is the same class with identical fields."""
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.datapath_id != other.datapath_id: return False
        if self.n_buffers != other.n_buffers: return False
        if self.n_tables != other.n_tables: return False
        if self.auxiliary_id != other.auxiliary_id: return False
        if self.capabilities != other.capabilities: return False
        if self.reserved != other.reserved: return False
        return True
    def pretty_print(self, q):
        """Render this message onto pretty-printer *q*."""
        q.text("features_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("datapath_id = ");
                q.text("%#x" % self.datapath_id)
                q.text(","); q.breakable()
                q.text("n_buffers = ");
                q.text("%#x" % self.n_buffers)
                q.text(","); q.breakable()
                q.text("n_tables = ");
                q.text("%#x" % self.n_tables)
                q.text(","); q.breakable()
                q.text("auxiliary_id = ");
                q.text("%#x" % self.auxiliary_id)
                q.text(","); q.breakable()
                q.text("capabilities = ");
                q.text("%#x" % self.capabilities)
                q.text(","); q.breakable()
                q.text("reserved = ");
                q.text("%#x" % self.reserved)
                q.breakable()
        q.text('}')
# Register with the top-level message dispatcher.
message.subtypes[6] = features_reply
class features_request(message):
    """OpenFlow 1.3 OFPT_FEATURES_REQUEST (header only, no body)."""
    version = 4
    type = 5
    def __init__(self, xid=None):
        self.xid = xid
    def pack(self):
        """Serialize to a wire-format string; length field is back-patched."""
        out = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder, patched below
            struct.pack("!L", self.xid),
        ]
        out[2] = struct.pack("!H", sum(len(piece) for piece in out))
        return ''.join(out)
    @staticmethod
    def unpack(reader):
        """Deserialize from *reader*; leaves it positioned past this message."""
        msg = features_request()
        hdr_version = reader.read("!B")[0]
        assert(hdr_version == 4)
        hdr_type = reader.read("!B")[0]
        assert(hdr_type == 5)
        hdr_length = reader.read("!H")[0]
        # Constrain reads to this message's remaining bytes.
        reader = reader.slice(hdr_length - (2 + 2))
        msg.xid = reader.read("!L")[0]
        return msg
    def __eq__(self, other):
        """Equal iff *other* is the same class with the same xid."""
        if type(self) != type(other):
            return False
        if self.xid != other.xid:
            return False
        return True
    def pretty_print(self, q):
        """Render this message onto pretty-printer *q*."""
        q.text("features_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid != None else 'None')
                q.breakable()
        q.text('}')
message.subtypes[5] = features_request
class flow_mod(message):
    """OpenFlow 1.3 OFPT_FLOW_MOD base class.

    Concrete commands (add/modify/delete variants) register themselves in
    `subtypes`, keyed by the one-byte flow-mod command; `unpack` dispatches
    on that byte.
    """
    subtypes = {}
    version = 4
    type = 14
    def __init__(self, xid=None, cookie=None, cookie_mask=None, table_id=None, _command=None, idle_timeout=None, hard_timeout=None, priority=None, buffer_id=None, out_port=None, out_group=None, flags=None, match=None, instructions=None):
        """Create a flow_mod; omitted fields default to None/0/empty match/empty list."""
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if cookie != None:
            self.cookie = cookie
        else:
            self.cookie = 0
        if cookie_mask != None:
            self.cookie_mask = cookie_mask
        else:
            self.cookie_mask = 0
        if table_id != None:
            self.table_id = table_id
        else:
            self.table_id = 0
        if _command != None:
            self._command = _command
        else:
            self._command = 0
        if idle_timeout != None:
            self.idle_timeout = idle_timeout
        else:
            self.idle_timeout = 0
        if hard_timeout != None:
            self.hard_timeout = hard_timeout
        else:
            self.hard_timeout = 0
        if priority != None:
            self.priority = priority
        else:
            self.priority = 0
        if buffer_id != None:
            self.buffer_id = buffer_id
        else:
            self.buffer_id = 0
        if out_port != None:
            self.out_port = out_port
        else:
            self.out_port = 0
        if out_group != None:
            self.out_group = out_group
        else:
            self.out_group = 0
        if flags != None:
            self.flags = flags
        else:
            self.flags = 0
        if match != None:
            self.match = match
        else:
            self.match = common.match()
        if instructions != None:
            self.instructions = instructions
        else:
            self.instructions = []
        return
    def pack(self):
        """Serialize to wire format; the 16-bit length at offset 2 is back-patched."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!Q", self.cookie))
        packed.append(struct.pack("!Q", self.cookie_mask))
        packed.append(struct.pack("!B", self.table_id))
        packed.append(util.pack_fm_cmd(self._command))
        packed.append(struct.pack("!H", self.idle_timeout))
        packed.append(struct.pack("!H", self.hard_timeout))
        packed.append(struct.pack("!H", self.priority))
        packed.append(struct.pack("!L", self.buffer_id))
        packed.append(util.pack_port_no(self.out_port))
        packed.append(struct.pack("!L", self.out_group))
        packed.append(struct.pack("!H", self.flags))
        packed.append('\x00' * 2)
        packed.append(self.match.pack())
        packed.append(loxi.generic_util.pack_list(self.instructions))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Deserialize from *reader*, dispatching to the registered subclass.

        The flow-mod command byte sits at offset 25 (after version, type,
        length, xid, cookie, cookie_mask and table_id); peek at it without
        consuming so the subclass can re-read the full message.
        """
        subtype, = reader.peek('B', 25)
        subclass = flow_mod.subtypes.get(subtype)
        if subclass:
            return subclass.unpack(reader)
        obj = flow_mod()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 14)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Constrain reads to this message: total length minus the 4 header
        # bytes already consumed.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        obj.cookie = reader.read("!Q")[0]
        obj.cookie_mask = reader.read("!Q")[0]
        obj.table_id = reader.read("!B")[0]
        obj._command = util.unpack_fm_cmd(reader)
        obj.idle_timeout = reader.read("!H")[0]
        obj.hard_timeout = reader.read("!H")[0]
        obj.priority = reader.read("!H")[0]
        obj.buffer_id = reader.read("!L")[0]
        obj.out_port = util.unpack_port_no(reader)
        obj.out_group = reader.read("!L")[0]
        obj.flags = reader.read("!H")[0]
        reader.skip(2)
        obj.match = common.match.unpack(reader)
        # Remaining payload is the instruction list.
        obj.instructions = loxi.generic_util.unpack_list(reader, instruction.instruction.unpack)
        return obj
    def __eq__(self, other):
        """Equal iff *other* is the same class with identical fields."""
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.cookie != other.cookie: return False
        if self.cookie_mask != other.cookie_mask: return False
        if self.table_id != other.table_id: return False
        if self._command != other._command: return False
        if self.idle_timeout != other.idle_timeout: return False
        if self.hard_timeout != other.hard_timeout: return False
        if self.priority != other.priority: return False
        if self.buffer_id != other.buffer_id: return False
        if self.out_port != other.out_port: return False
        if self.out_group != other.out_group: return False
        if self.flags != other.flags: return False
        if self.match != other.match: return False
        if self.instructions != other.instructions: return False
        return True
    def pretty_print(self, q):
        """Render this message onto pretty-printer *q* (the command byte is omitted)."""
        q.text("flow_mod {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("cookie = ");
                q.text("%#x" % self.cookie)
                q.text(","); q.breakable()
                q.text("cookie_mask = ");
                q.text("%#x" % self.cookie_mask)
                q.text(","); q.breakable()
                q.text("table_id = ");
                q.text("%#x" % self.table_id)
                q.text(","); q.breakable()
                q.text("idle_timeout = ");
                q.text("%#x" % self.idle_timeout)
                q.text(","); q.breakable()
                q.text("hard_timeout = ");
                q.text("%#x" % self.hard_timeout)
                q.text(","); q.breakable()
                q.text("priority = ");
                q.text("%#x" % self.priority)
                q.text(","); q.breakable()
                q.text("buffer_id = ");
                q.text("%#x" % self.buffer_id)
                q.text(","); q.breakable()
                q.text("out_port = ");
                q.text(util.pretty_port(self.out_port))
                q.text(","); q.breakable()
                q.text("out_group = ");
                q.text("%#x" % self.out_group)
                q.text(","); q.breakable()
                q.text("flags = ");
                q.text("%#x" % self.flags)
                q.text(","); q.breakable()
                q.text("match = ");
                q.pp(self.match)
                q.text(","); q.breakable()
                q.text("instructions = ");
                q.pp(self.instructions)
                q.breakable()
        q.text('}')
# Register with the top-level message dispatcher.
message.subtypes[14] = flow_mod
class flow_add(flow_mod):
version = 4
type = 14
_command = 0
def __init__(self, xid=None, cookie=None, cookie_mask=None, table_id=None, idle_timeout=None, hard_timeout=None, priority=None, buffer_id=None, out_port=None, out_group=None, flags=None, match=None, instructions=None):
if xid != None:
self.xid = xid
else:
self.xid = None
if cookie != None:
self.cookie = cookie
else:
self.cookie = 0
if cookie_mask != None:
self.cookie_mask = cookie_mask
else:
self.cookie_mask = 0
if table_id != None:
self.table_id = table_id
else:
self.table_id = 0
if idle_timeout != None:
self.idle_timeout = idle_timeout
else:
self.idle_timeout = 0
if hard_timeout != None:
self.hard_timeout = hard_timeout
else:
self.hard_timeout = 0
if priority != None:
self.priority = priority
else:
self.priority = 0
if buffer_id != None:
self.buffer_id = buffer_id
else:
self.buffer_id = 0
if out_port != None:
self.out_port = out_port
else:
self.out_port = 0
if out_group != None:
self.out_group = out_group
else:
self.out_group = 0
if flags != None:
self.flags = flags
else:
self.flags = 0
if match != None:
self.match = match
else:
self.match = common.match()
if instructions != None:
self.instructions = instructions
else:
self.instructions = []
return
def pack(self):
packed = []
packed.append(struct.pack("!B", self.version))
packed.append(struct.pack("!B", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
packed.append(struct.pack("!L", self.xid))
packed.append(struct.pack("!Q", self.cookie))
packed.append(struct.pack("!Q", self.cookie_mask))
packed.append(struct.pack("!B", self.table_id))
packed.append(util.pack_fm_cmd(self._command))
packed.append(struct.pack("!H", self.idle_timeout))
packed.append(struct.pack("!H", self.hard_timeout))
packed.append(struct.pack("!H", self.priority))
packed.append(struct.pack("!L", self.buffer_id))
packed.append(util.pack_port_no(self.out_port))
packed.append(struct.pack("!L", self.out_group))
packed.append(struct.pack("!H", self.flags))
packed.append('\x00' * 2)
packed.append(self.match.pack())
packed.append(loxi.generic_util.pack_list(self.instructions))
length = sum([len(x) for x in packed])
packed[2] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = flow_add()
_version = reader.read("!B")[0]
assert(_version == 4)
_type = reader.read("!B")[0]
assert(_type == 14)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length - (2 + 2))
obj.xid = reader.read("!L")[0]
obj.cookie = reader.read("!Q")[0]
obj.cookie_mask = reader.read("!Q")[0]
obj.table_id = reader.read("!B")[0]
__command = util.unpack_fm_cmd(reader)
assert(__command == 0)
obj.idle_timeout = reader.read("!H")[0]
obj.hard_timeout = reader.read("!H")[0]
obj.priority = reader.read("!H")[0]
obj.buffer_id = reader.read("!L")[0]
obj.out_port = util.unpack_port_no(reader)
obj.out_group = reader.read("!L")[0]
obj.flags = reader.read("!H")[0]
reader.skip(2)
obj.match = common.match.unpack(reader)
obj.instructions = loxi.generic_util.unpack_list(reader, instruction.instruction.unpack)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.xid != other.xid: return False
if self.cookie != other.cookie: return False
if self.cookie_mask != other.cookie_mask: return False
if self.table_id != other.table_id: return False
if self.idle_timeout != other.idle_timeout: return False
if self.hard_timeout != other.hard_timeout: return False
if self.priority != other.priority: return False
if self.buffer_id != other.buffer_id: return False
if self.out_port != other.out_port: return False
if self.out_group != other.out_group: return False
if self.flags != other.flags: return False
if self.match != other.match: return False
if self.instructions != other.instructions: return False
return True
def pretty_print(self, q):
q.text("flow_add {")
with q.group():
with q.indent(2):
q.breakable()
q.text("xid = ");
if self.xid != None:
q.text("%#x" % self.xid)
else:
q.text('None')
q.text(","); q.breakable()
q.text("cookie = ");
q.text("%#x" % self.cookie)
q.text(","); q.breakable()
q.text("cookie_mask = ");
q.text("%#x" % self.cookie_mask)
q.text(","); q.breakable()
q.text("table_id = ");
q.text("%#x" % self.table_id)
q.text(","); q.breakable()
q.text("idle_timeout = ");
q.text("%#x" % self.idle_timeout)
q.text(","); q.breakable()
q.text("hard_timeout = ");
q.text("%#x" % self.hard_timeout)
q.text(","); q.breakable()
q.text("priority = ");
q.text("%#x" % self.priority)
q.text(","); q.breakable()
q.text("buffer_id = ");
q.text("%#x" % self.buffer_id)
q.text(","); q.breakable()
q.text("out_port = ");
q.text(util.pretty_port(self.out_port))
q.text(","); q.breakable()
q.text("out_group = ");
q.text("%#x" % self.out_group)
q.text(","); q.breakable()
q.text("flags = ");
q.text("%#x" % self.flags)
q.text(","); q.breakable()
q.text("match = ");
q.pp(self.match)
q.text(","); q.breakable()
q.text("instructions = ");
q.pp(self.instructions)
q.breakable()
q.text('}')
# Dispatch entry: flow_mod messages with command 0 (OFPFC_ADD) unpack as flow_add.
flow_mod.subtypes[0] = flow_add
class flow_delete(flow_mod):
    """OFPT_FLOW_MOD message with command 3, OpenFlow 1.3 (wire version 4).

    Generated message class.  Per the OpenFlow 1.3 spec, command 3 is
    OFPFC_DELETE: remove all flow entries matching `match` (non-strict),
    optionally restricted by cookie/cookie_mask, out_port and out_group.
    """
    version = 4   # OpenFlow wire protocol version (1.3)
    type = 14     # OFPT_FLOW_MOD
    _command = 3  # OFPFC_DELETE
    def __init__(self, xid=None, cookie=None, cookie_mask=None, table_id=None, idle_timeout=None, hard_timeout=None, priority=None, buffer_id=None, out_port=None, out_group=None, flags=None, match=None, instructions=None):
        # Unset fields get neutral defaults (0 / empty match / empty list);
        # xid is left as None (presumably assigned later by the
        # transaction/connection layer -- not visible in this file).
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if cookie != None:
            self.cookie = cookie
        else:
            self.cookie = 0
        if cookie_mask != None:
            self.cookie_mask = cookie_mask
        else:
            self.cookie_mask = 0
        if table_id != None:
            self.table_id = table_id
        else:
            self.table_id = 0
        if idle_timeout != None:
            self.idle_timeout = idle_timeout
        else:
            self.idle_timeout = 0
        if hard_timeout != None:
            self.hard_timeout = hard_timeout
        else:
            self.hard_timeout = 0
        if priority != None:
            self.priority = priority
        else:
            self.priority = 0
        if buffer_id != None:
            self.buffer_id = buffer_id
        else:
            self.buffer_id = 0
        if out_port != None:
            self.out_port = out_port
        else:
            self.out_port = 0
        if out_group != None:
            self.out_group = out_group
        else:
            self.out_group = 0
        if flags != None:
            self.flags = flags
        else:
            self.flags = 0
        if match != None:
            self.match = match
        else:
            self.match = common.match()
        if instructions != None:
            self.instructions = instructions
        else:
            self.instructions = []
        return
    def pack(self):
        """Serialize to the OpenFlow wire format; returns the packed string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!Q", self.cookie))
        packed.append(struct.pack("!Q", self.cookie_mask))
        packed.append(struct.pack("!B", self.table_id))
        packed.append(util.pack_fm_cmd(self._command))
        packed.append(struct.pack("!H", self.idle_timeout))
        packed.append(struct.pack("!H", self.hard_timeout))
        packed.append(struct.pack("!H", self.priority))
        packed.append(struct.pack("!L", self.buffer_id))
        packed.append(util.pack_port_no(self.out_port))
        packed.append(struct.pack("!L", self.out_group))
        packed.append(struct.pack("!H", self.flags))
        packed.append('\x00' * 2)  # 2 pad bytes
        packed.append(self.match.pack())
        packed.append(loxi.generic_util.pack_list(self.instructions))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)  # back-patch the real length
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Parse a flow_delete from `reader`; asserts on header mismatches."""
        obj = flow_delete()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 14)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Confine reads to the payload: _length minus the 4 header bytes
        # (version, type, length) already consumed.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        obj.cookie = reader.read("!Q")[0]
        obj.cookie_mask = reader.read("!Q")[0]
        obj.table_id = reader.read("!B")[0]
        __command = util.unpack_fm_cmd(reader)
        assert(__command == 3)
        obj.idle_timeout = reader.read("!H")[0]
        obj.hard_timeout = reader.read("!H")[0]
        obj.priority = reader.read("!H")[0]
        obj.buffer_id = reader.read("!L")[0]
        obj.out_port = util.unpack_port_no(reader)
        obj.out_group = reader.read("!L")[0]
        obj.flags = reader.read("!H")[0]
        reader.skip(2)  # 2 pad bytes
        obj.match = common.match.unpack(reader)
        obj.instructions = loxi.generic_util.unpack_list(reader, instruction.instruction.unpack)
        return obj
    def __eq__(self, other):
        """Field-by-field equality; other types never compare equal."""
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.cookie != other.cookie: return False
        if self.cookie_mask != other.cookie_mask: return False
        if self.table_id != other.table_id: return False
        if self.idle_timeout != other.idle_timeout: return False
        if self.hard_timeout != other.hard_timeout: return False
        if self.priority != other.priority: return False
        if self.buffer_id != other.buffer_id: return False
        if self.out_port != other.out_port: return False
        if self.out_group != other.out_group: return False
        if self.flags != other.flags: return False
        if self.match != other.match: return False
        if self.instructions != other.instructions: return False
        return True
    def pretty_print(self, q):
        """Render a human-readable dump via loxi's pretty-printer `q`."""
        q.text("flow_delete {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("cookie = ");
                q.text("%#x" % self.cookie)
                q.text(","); q.breakable()
                q.text("cookie_mask = ");
                q.text("%#x" % self.cookie_mask)
                q.text(","); q.breakable()
                q.text("table_id = ");
                q.text("%#x" % self.table_id)
                q.text(","); q.breakable()
                q.text("idle_timeout = ");
                q.text("%#x" % self.idle_timeout)
                q.text(","); q.breakable()
                q.text("hard_timeout = ");
                q.text("%#x" % self.hard_timeout)
                q.text(","); q.breakable()
                q.text("priority = ");
                q.text("%#x" % self.priority)
                q.text(","); q.breakable()
                q.text("buffer_id = ");
                q.text("%#x" % self.buffer_id)
                q.text(","); q.breakable()
                q.text("out_port = ");
                q.text(util.pretty_port(self.out_port))
                q.text(","); q.breakable()
                q.text("out_group = ");
                q.text("%#x" % self.out_group)
                q.text(","); q.breakable()
                q.text("flags = ");
                q.text("%#x" % self.flags)
                q.text(","); q.breakable()
                q.text("match = ");
                q.pp(self.match)
                q.text(","); q.breakable()
                q.text("instructions = ");
                q.pp(self.instructions)
                q.breakable()
        q.text('}')
# Dispatch entry: flow_mod messages with command 3 (OFPFC_DELETE) unpack as flow_delete.
flow_mod.subtypes[3] = flow_delete
class flow_delete_strict(flow_mod):
    """OFPT_FLOW_MOD message with command 4, OpenFlow 1.3 (wire version 4).

    Generated message class.  Per the OpenFlow 1.3 spec, command 4 is
    OFPFC_DELETE_STRICT: remove only the entry strictly matching
    `match` and `priority`.
    """
    version = 4   # OpenFlow wire protocol version (1.3)
    type = 14     # OFPT_FLOW_MOD
    _command = 4  # OFPFC_DELETE_STRICT
    def __init__(self, xid=None, cookie=None, cookie_mask=None, table_id=None, idle_timeout=None, hard_timeout=None, priority=None, buffer_id=None, out_port=None, out_group=None, flags=None, match=None, instructions=None):
        # Unset fields get neutral defaults (0 / empty match / empty list);
        # xid is left as None (presumably assigned later -- not visible here).
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if cookie != None:
            self.cookie = cookie
        else:
            self.cookie = 0
        if cookie_mask != None:
            self.cookie_mask = cookie_mask
        else:
            self.cookie_mask = 0
        if table_id != None:
            self.table_id = table_id
        else:
            self.table_id = 0
        if idle_timeout != None:
            self.idle_timeout = idle_timeout
        else:
            self.idle_timeout = 0
        if hard_timeout != None:
            self.hard_timeout = hard_timeout
        else:
            self.hard_timeout = 0
        if priority != None:
            self.priority = priority
        else:
            self.priority = 0
        if buffer_id != None:
            self.buffer_id = buffer_id
        else:
            self.buffer_id = 0
        if out_port != None:
            self.out_port = out_port
        else:
            self.out_port = 0
        if out_group != None:
            self.out_group = out_group
        else:
            self.out_group = 0
        if flags != None:
            self.flags = flags
        else:
            self.flags = 0
        if match != None:
            self.match = match
        else:
            self.match = common.match()
        if instructions != None:
            self.instructions = instructions
        else:
            self.instructions = []
        return
    def pack(self):
        """Serialize to the OpenFlow wire format; returns the packed string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!Q", self.cookie))
        packed.append(struct.pack("!Q", self.cookie_mask))
        packed.append(struct.pack("!B", self.table_id))
        packed.append(util.pack_fm_cmd(self._command))
        packed.append(struct.pack("!H", self.idle_timeout))
        packed.append(struct.pack("!H", self.hard_timeout))
        packed.append(struct.pack("!H", self.priority))
        packed.append(struct.pack("!L", self.buffer_id))
        packed.append(util.pack_port_no(self.out_port))
        packed.append(struct.pack("!L", self.out_group))
        packed.append(struct.pack("!H", self.flags))
        packed.append('\x00' * 2)  # 2 pad bytes
        packed.append(self.match.pack())
        packed.append(loxi.generic_util.pack_list(self.instructions))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)  # back-patch the real length
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Parse a flow_delete_strict from `reader`; asserts on header mismatches."""
        obj = flow_delete_strict()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 14)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Confine reads to the payload: _length minus the 4 header bytes
        # (version, type, length) already consumed.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        obj.cookie = reader.read("!Q")[0]
        obj.cookie_mask = reader.read("!Q")[0]
        obj.table_id = reader.read("!B")[0]
        __command = util.unpack_fm_cmd(reader)
        assert(__command == 4)
        obj.idle_timeout = reader.read("!H")[0]
        obj.hard_timeout = reader.read("!H")[0]
        obj.priority = reader.read("!H")[0]
        obj.buffer_id = reader.read("!L")[0]
        obj.out_port = util.unpack_port_no(reader)
        obj.out_group = reader.read("!L")[0]
        obj.flags = reader.read("!H")[0]
        reader.skip(2)  # 2 pad bytes
        obj.match = common.match.unpack(reader)
        obj.instructions = loxi.generic_util.unpack_list(reader, instruction.instruction.unpack)
        return obj
    def __eq__(self, other):
        """Field-by-field equality; other types never compare equal."""
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.cookie != other.cookie: return False
        if self.cookie_mask != other.cookie_mask: return False
        if self.table_id != other.table_id: return False
        if self.idle_timeout != other.idle_timeout: return False
        if self.hard_timeout != other.hard_timeout: return False
        if self.priority != other.priority: return False
        if self.buffer_id != other.buffer_id: return False
        if self.out_port != other.out_port: return False
        if self.out_group != other.out_group: return False
        if self.flags != other.flags: return False
        if self.match != other.match: return False
        if self.instructions != other.instructions: return False
        return True
    def pretty_print(self, q):
        """Render a human-readable dump via loxi's pretty-printer `q`."""
        q.text("flow_delete_strict {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("cookie = ");
                q.text("%#x" % self.cookie)
                q.text(","); q.breakable()
                q.text("cookie_mask = ");
                q.text("%#x" % self.cookie_mask)
                q.text(","); q.breakable()
                q.text("table_id = ");
                q.text("%#x" % self.table_id)
                q.text(","); q.breakable()
                q.text("idle_timeout = ");
                q.text("%#x" % self.idle_timeout)
                q.text(","); q.breakable()
                q.text("hard_timeout = ");
                q.text("%#x" % self.hard_timeout)
                q.text(","); q.breakable()
                q.text("priority = ");
                q.text("%#x" % self.priority)
                q.text(","); q.breakable()
                q.text("buffer_id = ");
                q.text("%#x" % self.buffer_id)
                q.text(","); q.breakable()
                q.text("out_port = ");
                q.text(util.pretty_port(self.out_port))
                q.text(","); q.breakable()
                q.text("out_group = ");
                q.text("%#x" % self.out_group)
                q.text(","); q.breakable()
                q.text("flags = ");
                q.text("%#x" % self.flags)
                q.text(","); q.breakable()
                q.text("match = ");
                q.pp(self.match)
                q.text(","); q.breakable()
                q.text("instructions = ");
                q.pp(self.instructions)
                q.breakable()
        q.text('}')
# Dispatch entry: flow_mod command 4 (OFPFC_DELETE_STRICT) unpacks as flow_delete_strict.
flow_mod.subtypes[4] = flow_delete_strict
class flow_mod_failed_error_msg(error_msg):
    """OpenFlow 1.3 OFPT_ERROR with err_type 5 (flow-mod failed).

    Carries the failure `code` and the raw `data` bytes of the
    offending request.
    """
    version = 4   # OpenFlow 1.3
    type = 1      # OFPT_ERROR
    err_type = 5  # flow-mod failed
    def __init__(self, xid=None, code=None, data=None):
        # Neutral defaults for unset fields; xid stays None until assigned.
        self.xid = xid
        self.code = code if code is not None else 0
        self.data = data if data is not None else ''
        return
    def pack(self):
        """Serialize this error message; the length field is back-patched."""
        parts = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder (index 2)
            struct.pack("!L", self.xid),
            struct.pack("!H", self.err_type),
            struct.pack("!H", self.code),
            self.data,
        ]
        parts[2] = struct.pack("!H", sum(len(p) for p in parts))
        return ''.join(parts)
    @staticmethod
    def unpack(reader):
        """Parse a flow_mod_failed_error_msg; asserts on header mismatches."""
        msg = flow_mod_failed_error_msg()
        version = reader.read("!B")[0]
        assert version == 4
        msg_type = reader.read("!B")[0]
        assert msg_type == 1
        length = reader.read("!H")[0]
        # Confine reads to the payload (length minus the 4 header bytes).
        reader = reader.slice(length - (2 + 2))
        msg.xid = reader.read("!L")[0]
        err_type = reader.read("!H")[0]
        assert err_type == 5
        msg.code = reader.read("!H")[0]
        msg.data = str(reader.read_all())
        return msg
    def __eq__(self, other):
        """Equal iff `other` is the same class with identical fields."""
        if type(self) is not type(other):
            return False
        return (self.xid == other.xid and
                self.code == other.code and
                self.data == other.data)
    def pretty_print(self, q):
        """Render a human-readable dump via loxi's pretty-printer `q`."""
        q.text("flow_mod_failed_error_msg {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid is not None else 'None')
                q.text(","); q.breakable()
                q.text("code = ")
                q.text("%#x" % self.code)
                q.text(","); q.breakable()
                q.text("data = ")
                q.pp(self.data)
                q.breakable()
        q.text('}')
# Dispatch entry: error messages with err_type 5 unpack as flow_mod_failed_error_msg.
error_msg.subtypes[5] = flow_mod_failed_error_msg
class flow_modify(flow_mod):
    """OFPT_FLOW_MOD message with command 1, OpenFlow 1.3 (wire version 4).

    Generated message class.  Per the OpenFlow 1.3 spec, command 1 is
    OFPFC_MODIFY: update the instructions of all entries matching
    `match` (non-strict).
    """
    version = 4   # OpenFlow wire protocol version (1.3)
    type = 14     # OFPT_FLOW_MOD
    _command = 1  # OFPFC_MODIFY
    def __init__(self, xid=None, cookie=None, cookie_mask=None, table_id=None, idle_timeout=None, hard_timeout=None, priority=None, buffer_id=None, out_port=None, out_group=None, flags=None, match=None, instructions=None):
        # Unset fields get neutral defaults (0 / empty match / empty list);
        # xid is left as None (presumably assigned later -- not visible here).
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if cookie != None:
            self.cookie = cookie
        else:
            self.cookie = 0
        if cookie_mask != None:
            self.cookie_mask = cookie_mask
        else:
            self.cookie_mask = 0
        if table_id != None:
            self.table_id = table_id
        else:
            self.table_id = 0
        if idle_timeout != None:
            self.idle_timeout = idle_timeout
        else:
            self.idle_timeout = 0
        if hard_timeout != None:
            self.hard_timeout = hard_timeout
        else:
            self.hard_timeout = 0
        if priority != None:
            self.priority = priority
        else:
            self.priority = 0
        if buffer_id != None:
            self.buffer_id = buffer_id
        else:
            self.buffer_id = 0
        if out_port != None:
            self.out_port = out_port
        else:
            self.out_port = 0
        if out_group != None:
            self.out_group = out_group
        else:
            self.out_group = 0
        if flags != None:
            self.flags = flags
        else:
            self.flags = 0
        if match != None:
            self.match = match
        else:
            self.match = common.match()
        if instructions != None:
            self.instructions = instructions
        else:
            self.instructions = []
        return
    def pack(self):
        """Serialize to the OpenFlow wire format; returns the packed string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!Q", self.cookie))
        packed.append(struct.pack("!Q", self.cookie_mask))
        packed.append(struct.pack("!B", self.table_id))
        packed.append(util.pack_fm_cmd(self._command))
        packed.append(struct.pack("!H", self.idle_timeout))
        packed.append(struct.pack("!H", self.hard_timeout))
        packed.append(struct.pack("!H", self.priority))
        packed.append(struct.pack("!L", self.buffer_id))
        packed.append(util.pack_port_no(self.out_port))
        packed.append(struct.pack("!L", self.out_group))
        packed.append(struct.pack("!H", self.flags))
        packed.append('\x00' * 2)  # 2 pad bytes
        packed.append(self.match.pack())
        packed.append(loxi.generic_util.pack_list(self.instructions))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)  # back-patch the real length
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Parse a flow_modify from `reader`; asserts on header mismatches."""
        obj = flow_modify()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 14)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Confine reads to the payload: _length minus the 4 header bytes
        # (version, type, length) already consumed.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        obj.cookie = reader.read("!Q")[0]
        obj.cookie_mask = reader.read("!Q")[0]
        obj.table_id = reader.read("!B")[0]
        __command = util.unpack_fm_cmd(reader)
        assert(__command == 1)
        obj.idle_timeout = reader.read("!H")[0]
        obj.hard_timeout = reader.read("!H")[0]
        obj.priority = reader.read("!H")[0]
        obj.buffer_id = reader.read("!L")[0]
        obj.out_port = util.unpack_port_no(reader)
        obj.out_group = reader.read("!L")[0]
        obj.flags = reader.read("!H")[0]
        reader.skip(2)  # 2 pad bytes
        obj.match = common.match.unpack(reader)
        obj.instructions = loxi.generic_util.unpack_list(reader, instruction.instruction.unpack)
        return obj
    def __eq__(self, other):
        """Field-by-field equality; other types never compare equal."""
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.cookie != other.cookie: return False
        if self.cookie_mask != other.cookie_mask: return False
        if self.table_id != other.table_id: return False
        if self.idle_timeout != other.idle_timeout: return False
        if self.hard_timeout != other.hard_timeout: return False
        if self.priority != other.priority: return False
        if self.buffer_id != other.buffer_id: return False
        if self.out_port != other.out_port: return False
        if self.out_group != other.out_group: return False
        if self.flags != other.flags: return False
        if self.match != other.match: return False
        if self.instructions != other.instructions: return False
        return True
    def pretty_print(self, q):
        """Render a human-readable dump via loxi's pretty-printer `q`."""
        q.text("flow_modify {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("cookie = ");
                q.text("%#x" % self.cookie)
                q.text(","); q.breakable()
                q.text("cookie_mask = ");
                q.text("%#x" % self.cookie_mask)
                q.text(","); q.breakable()
                q.text("table_id = ");
                q.text("%#x" % self.table_id)
                q.text(","); q.breakable()
                q.text("idle_timeout = ");
                q.text("%#x" % self.idle_timeout)
                q.text(","); q.breakable()
                q.text("hard_timeout = ");
                q.text("%#x" % self.hard_timeout)
                q.text(","); q.breakable()
                q.text("priority = ");
                q.text("%#x" % self.priority)
                q.text(","); q.breakable()
                q.text("buffer_id = ");
                q.text("%#x" % self.buffer_id)
                q.text(","); q.breakable()
                q.text("out_port = ");
                q.text(util.pretty_port(self.out_port))
                q.text(","); q.breakable()
                q.text("out_group = ");
                q.text("%#x" % self.out_group)
                q.text(","); q.breakable()
                q.text("flags = ");
                q.text("%#x" % self.flags)
                q.text(","); q.breakable()
                q.text("match = ");
                q.pp(self.match)
                q.text(","); q.breakable()
                q.text("instructions = ");
                q.pp(self.instructions)
                q.breakable()
        q.text('}')
# Dispatch entry: flow_mod messages with command 1 (OFPFC_MODIFY) unpack as flow_modify.
flow_mod.subtypes[1] = flow_modify
class flow_modify_strict(flow_mod):
    """OFPT_FLOW_MOD message with command 2, OpenFlow 1.3 (wire version 4).

    Generated message class.  Per the OpenFlow 1.3 spec, command 2 is
    OFPFC_MODIFY_STRICT: update only the entry strictly matching
    `match` and `priority`.
    """
    version = 4   # OpenFlow wire protocol version (1.3)
    type = 14     # OFPT_FLOW_MOD
    _command = 2  # OFPFC_MODIFY_STRICT
    def __init__(self, xid=None, cookie=None, cookie_mask=None, table_id=None, idle_timeout=None, hard_timeout=None, priority=None, buffer_id=None, out_port=None, out_group=None, flags=None, match=None, instructions=None):
        # Unset fields get neutral defaults (0 / empty match / empty list);
        # xid is left as None (presumably assigned later -- not visible here).
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if cookie != None:
            self.cookie = cookie
        else:
            self.cookie = 0
        if cookie_mask != None:
            self.cookie_mask = cookie_mask
        else:
            self.cookie_mask = 0
        if table_id != None:
            self.table_id = table_id
        else:
            self.table_id = 0
        if idle_timeout != None:
            self.idle_timeout = idle_timeout
        else:
            self.idle_timeout = 0
        if hard_timeout != None:
            self.hard_timeout = hard_timeout
        else:
            self.hard_timeout = 0
        if priority != None:
            self.priority = priority
        else:
            self.priority = 0
        if buffer_id != None:
            self.buffer_id = buffer_id
        else:
            self.buffer_id = 0
        if out_port != None:
            self.out_port = out_port
        else:
            self.out_port = 0
        if out_group != None:
            self.out_group = out_group
        else:
            self.out_group = 0
        if flags != None:
            self.flags = flags
        else:
            self.flags = 0
        if match != None:
            self.match = match
        else:
            self.match = common.match()
        if instructions != None:
            self.instructions = instructions
        else:
            self.instructions = []
        return
    def pack(self):
        """Serialize to the OpenFlow wire format; returns the packed string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!Q", self.cookie))
        packed.append(struct.pack("!Q", self.cookie_mask))
        packed.append(struct.pack("!B", self.table_id))
        packed.append(util.pack_fm_cmd(self._command))
        packed.append(struct.pack("!H", self.idle_timeout))
        packed.append(struct.pack("!H", self.hard_timeout))
        packed.append(struct.pack("!H", self.priority))
        packed.append(struct.pack("!L", self.buffer_id))
        packed.append(util.pack_port_no(self.out_port))
        packed.append(struct.pack("!L", self.out_group))
        packed.append(struct.pack("!H", self.flags))
        packed.append('\x00' * 2)  # 2 pad bytes
        packed.append(self.match.pack())
        packed.append(loxi.generic_util.pack_list(self.instructions))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)  # back-patch the real length
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Parse a flow_modify_strict from `reader`; asserts on header mismatches."""
        obj = flow_modify_strict()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 14)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Confine reads to the payload: _length minus the 4 header bytes
        # (version, type, length) already consumed.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        obj.cookie = reader.read("!Q")[0]
        obj.cookie_mask = reader.read("!Q")[0]
        obj.table_id = reader.read("!B")[0]
        __command = util.unpack_fm_cmd(reader)
        assert(__command == 2)
        obj.idle_timeout = reader.read("!H")[0]
        obj.hard_timeout = reader.read("!H")[0]
        obj.priority = reader.read("!H")[0]
        obj.buffer_id = reader.read("!L")[0]
        obj.out_port = util.unpack_port_no(reader)
        obj.out_group = reader.read("!L")[0]
        obj.flags = reader.read("!H")[0]
        reader.skip(2)  # 2 pad bytes
        obj.match = common.match.unpack(reader)
        obj.instructions = loxi.generic_util.unpack_list(reader, instruction.instruction.unpack)
        return obj
    def __eq__(self, other):
        """Field-by-field equality; other types never compare equal."""
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.cookie != other.cookie: return False
        if self.cookie_mask != other.cookie_mask: return False
        if self.table_id != other.table_id: return False
        if self.idle_timeout != other.idle_timeout: return False
        if self.hard_timeout != other.hard_timeout: return False
        if self.priority != other.priority: return False
        if self.buffer_id != other.buffer_id: return False
        if self.out_port != other.out_port: return False
        if self.out_group != other.out_group: return False
        if self.flags != other.flags: return False
        if self.match != other.match: return False
        if self.instructions != other.instructions: return False
        return True
    def pretty_print(self, q):
        """Render a human-readable dump via loxi's pretty-printer `q`."""
        q.text("flow_modify_strict {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("cookie = ");
                q.text("%#x" % self.cookie)
                q.text(","); q.breakable()
                q.text("cookie_mask = ");
                q.text("%#x" % self.cookie_mask)
                q.text(","); q.breakable()
                q.text("table_id = ");
                q.text("%#x" % self.table_id)
                q.text(","); q.breakable()
                q.text("idle_timeout = ");
                q.text("%#x" % self.idle_timeout)
                q.text(","); q.breakable()
                q.text("hard_timeout = ");
                q.text("%#x" % self.hard_timeout)
                q.text(","); q.breakable()
                q.text("priority = ");
                q.text("%#x" % self.priority)
                q.text(","); q.breakable()
                q.text("buffer_id = ");
                q.text("%#x" % self.buffer_id)
                q.text(","); q.breakable()
                q.text("out_port = ");
                q.text(util.pretty_port(self.out_port))
                q.text(","); q.breakable()
                q.text("out_group = ");
                q.text("%#x" % self.out_group)
                q.text(","); q.breakable()
                q.text("flags = ");
                q.text("%#x" % self.flags)
                q.text(","); q.breakable()
                q.text("match = ");
                q.pp(self.match)
                q.text(","); q.breakable()
                q.text("instructions = ");
                q.pp(self.instructions)
                q.breakable()
        q.text('}')
# Dispatch entry: flow_mod command 2 (OFPFC_MODIFY_STRICT) unpacks as flow_modify_strict.
flow_mod.subtypes[2] = flow_modify_strict
class flow_removed(message):
    """OFPT_FLOW_REMOVED (type 11), OpenFlow 1.3 (wire version 4).

    Generated message class.  Sent by a switch when a flow entry is
    removed; reports the entry's identity (cookie, priority, table_id,
    match), the removal `reason`, its lifetime (duration_sec/nsec,
    timeouts) and its packet/byte counters.
    """
    version = 4  # OpenFlow wire protocol version (1.3)
    type = 11    # OFPT_FLOW_REMOVED
    def __init__(self, xid=None, cookie=None, priority=None, reason=None, table_id=None, duration_sec=None, duration_nsec=None, idle_timeout=None, hard_timeout=None, packet_count=None, byte_count=None, match=None):
        # Unset fields get neutral defaults (0 / empty match); xid is left
        # as None (presumably assigned later -- not visible here).
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if cookie != None:
            self.cookie = cookie
        else:
            self.cookie = 0
        if priority != None:
            self.priority = priority
        else:
            self.priority = 0
        if reason != None:
            self.reason = reason
        else:
            self.reason = 0
        if table_id != None:
            self.table_id = table_id
        else:
            self.table_id = 0
        if duration_sec != None:
            self.duration_sec = duration_sec
        else:
            self.duration_sec = 0
        if duration_nsec != None:
            self.duration_nsec = duration_nsec
        else:
            self.duration_nsec = 0
        if idle_timeout != None:
            self.idle_timeout = idle_timeout
        else:
            self.idle_timeout = 0
        if hard_timeout != None:
            self.hard_timeout = hard_timeout
        else:
            self.hard_timeout = 0
        if packet_count != None:
            self.packet_count = packet_count
        else:
            self.packet_count = 0
        if byte_count != None:
            self.byte_count = byte_count
        else:
            self.byte_count = 0
        if match != None:
            self.match = match
        else:
            self.match = common.match()
        return
    def pack(self):
        """Serialize to the OpenFlow wire format; returns the packed string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!Q", self.cookie))
        packed.append(struct.pack("!H", self.priority))
        packed.append(struct.pack("!B", self.reason))
        packed.append(struct.pack("!B", self.table_id))
        packed.append(struct.pack("!L", self.duration_sec))
        packed.append(struct.pack("!L", self.duration_nsec))
        packed.append(struct.pack("!H", self.idle_timeout))
        packed.append(struct.pack("!H", self.hard_timeout))
        packed.append(struct.pack("!Q", self.packet_count))
        packed.append(struct.pack("!Q", self.byte_count))
        packed.append(self.match.pack())
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)  # back-patch the real length
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Parse a flow_removed from `reader`; asserts on header mismatches."""
        obj = flow_removed()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 11)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Confine reads to the payload: _length minus the 4 header bytes
        # (version, type, length) already consumed.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        obj.cookie = reader.read("!Q")[0]
        obj.priority = reader.read("!H")[0]
        obj.reason = reader.read("!B")[0]
        obj.table_id = reader.read("!B")[0]
        obj.duration_sec = reader.read("!L")[0]
        obj.duration_nsec = reader.read("!L")[0]
        obj.idle_timeout = reader.read("!H")[0]
        obj.hard_timeout = reader.read("!H")[0]
        obj.packet_count = reader.read("!Q")[0]
        obj.byte_count = reader.read("!Q")[0]
        obj.match = common.match.unpack(reader)
        return obj
    def __eq__(self, other):
        """Field-by-field equality; other types never compare equal."""
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.cookie != other.cookie: return False
        if self.priority != other.priority: return False
        if self.reason != other.reason: return False
        if self.table_id != other.table_id: return False
        if self.duration_sec != other.duration_sec: return False
        if self.duration_nsec != other.duration_nsec: return False
        if self.idle_timeout != other.idle_timeout: return False
        if self.hard_timeout != other.hard_timeout: return False
        if self.packet_count != other.packet_count: return False
        if self.byte_count != other.byte_count: return False
        if self.match != other.match: return False
        return True
    def pretty_print(self, q):
        """Render a human-readable dump via loxi's pretty-printer `q`."""
        q.text("flow_removed {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("cookie = ");
                q.text("%#x" % self.cookie)
                q.text(","); q.breakable()
                q.text("priority = ");
                q.text("%#x" % self.priority)
                q.text(","); q.breakable()
                q.text("reason = ");
                q.text("%#x" % self.reason)
                q.text(","); q.breakable()
                q.text("table_id = ");
                q.text("%#x" % self.table_id)
                q.text(","); q.breakable()
                q.text("duration_sec = ");
                q.text("%#x" % self.duration_sec)
                q.text(","); q.breakable()
                q.text("duration_nsec = ");
                q.text("%#x" % self.duration_nsec)
                q.text(","); q.breakable()
                q.text("idle_timeout = ");
                q.text("%#x" % self.idle_timeout)
                q.text(","); q.breakable()
                q.text("hard_timeout = ");
                q.text("%#x" % self.hard_timeout)
                q.text(","); q.breakable()
                q.text("packet_count = ");
                q.text("%#x" % self.packet_count)
                q.text(","); q.breakable()
                q.text("byte_count = ");
                q.text("%#x" % self.byte_count)
                q.text(","); q.breakable()
                q.text("match = ");
                q.pp(self.match)
                q.breakable()
        q.text('}')
# Dispatch entry: messages with type 11 (OFPT_FLOW_REMOVED) unpack as flow_removed.
message.subtypes[11] = flow_removed
class flow_stats_reply(stats_reply):
    """OpenFlow 1.3 stats reply (type 19) carrying flow stats entries
    (stats_type 1)."""
    version = 4     # OpenFlow 1.3
    type = 19       # stats/multipart reply
    stats_type = 1  # flow stats
    def __init__(self, xid=None, flags=None, entries=None):
        # Neutral defaults for unset fields; xid stays None until assigned.
        self.xid = xid
        self.flags = flags if flags is not None else 0
        self.entries = entries if entries is not None else []
        return
    def pack(self):
        """Serialize this reply; the length field is back-patched."""
        parts = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder (index 2)
            struct.pack("!L", self.xid),
            struct.pack("!H", self.stats_type),
            struct.pack("!H", self.flags),
            '\x00' * 4,  # 4 pad bytes
            loxi.generic_util.pack_list(self.entries),
        ]
        parts[2] = struct.pack("!H", sum(len(p) for p in parts))
        return ''.join(parts)
    @staticmethod
    def unpack(reader):
        """Parse a flow_stats_reply; asserts on header mismatches."""
        msg = flow_stats_reply()
        version = reader.read("!B")[0]
        assert version == 4
        msg_type = reader.read("!B")[0]
        assert msg_type == 19
        length = reader.read("!H")[0]
        # Confine reads to the payload (length minus the 4 header bytes).
        reader = reader.slice(length - (2 + 2))
        msg.xid = reader.read("!L")[0]
        stats_type = reader.read("!H")[0]
        assert stats_type == 1
        msg.flags = reader.read("!H")[0]
        reader.skip(4)  # 4 pad bytes
        msg.entries = loxi.generic_util.unpack_list(reader, common.flow_stats_entry.unpack)
        return msg
    def __eq__(self, other):
        """Equal iff `other` is the same class with identical fields."""
        if type(self) is not type(other):
            return False
        return (self.xid == other.xid and
                self.flags == other.flags and
                self.entries == other.entries)
    def pretty_print(self, q):
        """Render a human-readable dump via loxi's pretty-printer `q`."""
        q.text("flow_stats_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid is not None else 'None')
                q.text(","); q.breakable()
                q.text("flags = ")
                q.text("%#x" % self.flags)
                q.text(","); q.breakable()
                q.text("entries = ")
                q.pp(self.entries)
                q.breakable()
        q.text('}')
# Dispatch entry: stats replies with stats_type 1 unpack as flow_stats_reply.
stats_reply.subtypes[1] = flow_stats_reply
class flow_stats_request(stats_request):
    """OpenFlow 1.3 stats/multipart request (type 18) for flow stats
    (stats_type 1).

    Generated message class: asks the switch for statistics of flow
    entries selected by `match`, filtered by table_id, out_port,
    out_group and cookie/cookie_mask.
    """
    version = 4     # OpenFlow wire protocol version (1.3)
    type = 18       # stats/multipart request
    stats_type = 1  # flow stats
    def __init__(self, xid=None, flags=None, table_id=None, out_port=None, out_group=None, cookie=None, cookie_mask=None, match=None):
        # Unset fields get neutral defaults (0 / empty match); xid is left
        # as None (presumably assigned later -- not visible here).
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if flags != None:
            self.flags = flags
        else:
            self.flags = 0
        if table_id != None:
            self.table_id = table_id
        else:
            self.table_id = 0
        if out_port != None:
            self.out_port = out_port
        else:
            self.out_port = 0
        if out_group != None:
            self.out_group = out_group
        else:
            self.out_group = 0
        if cookie != None:
            self.cookie = cookie
        else:
            self.cookie = 0
        if cookie_mask != None:
            self.cookie_mask = cookie_mask
        else:
            self.cookie_mask = 0
        if match != None:
            self.match = match
        else:
            self.match = common.match()
        return
    def pack(self):
        """Serialize to the OpenFlow wire format; returns the packed string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.stats_type))
        packed.append(struct.pack("!H", self.flags))
        packed.append('\x00' * 4)  # 4 pad bytes
        packed.append(struct.pack("!B", self.table_id))
        packed.append('\x00' * 3)  # 3 pad bytes
        packed.append(util.pack_port_no(self.out_port))
        packed.append(struct.pack("!L", self.out_group))
        packed.append('\x00' * 4)  # 4 pad bytes
        packed.append(struct.pack("!Q", self.cookie))
        packed.append(struct.pack("!Q", self.cookie_mask))
        packed.append(self.match.pack())
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)  # back-patch the real length
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Parse a flow_stats_request from `reader`; asserts on header mismatches."""
        obj = flow_stats_request()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 18)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Confine reads to the payload: _length minus the 4 header bytes
        # (version, type, length) already consumed.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _stats_type = reader.read("!H")[0]
        assert(_stats_type == 1)
        obj.flags = reader.read("!H")[0]
        reader.skip(4)  # 4 pad bytes
        obj.table_id = reader.read("!B")[0]
        reader.skip(3)  # 3 pad bytes
        obj.out_port = util.unpack_port_no(reader)
        obj.out_group = reader.read("!L")[0]
        reader.skip(4)  # 4 pad bytes
        obj.cookie = reader.read("!Q")[0]
        obj.cookie_mask = reader.read("!Q")[0]
        obj.match = common.match.unpack(reader)
        return obj
    def __eq__(self, other):
        """Field-by-field equality; other types never compare equal."""
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.flags != other.flags: return False
        if self.table_id != other.table_id: return False
        if self.out_port != other.out_port: return False
        if self.out_group != other.out_group: return False
        if self.cookie != other.cookie: return False
        if self.cookie_mask != other.cookie_mask: return False
        if self.match != other.match: return False
        return True
    def pretty_print(self, q):
        """Render a human-readable dump via loxi's pretty-printer `q`."""
        q.text("flow_stats_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("flags = ");
                q.text("%#x" % self.flags)
                q.text(","); q.breakable()
                q.text("table_id = ");
                q.text("%#x" % self.table_id)
                q.text(","); q.breakable()
                q.text("out_port = ");
                q.text(util.pretty_port(self.out_port))
                q.text(","); q.breakable()
                q.text("out_group = ");
                q.text("%#x" % self.out_group)
                q.text(","); q.breakable()
                q.text("cookie = ");
                q.text("%#x" % self.cookie)
                q.text(","); q.breakable()
                q.text("cookie_mask = ");
                q.text("%#x" % self.cookie_mask)
                q.text(","); q.breakable()
                q.text("match = ");
                q.pp(self.match)
                q.breakable()
        q.text('}')
stats_request.subtypes[1] = flow_stats_request
class get_config_reply(message):
    """OFPT_GET_CONFIG_REPLY (OpenFlow 1.3): switch config flags and
    miss_send_len in response to a get_config_request."""
    version = 4
    type = 8

    def __init__(self, xid=None, flags=None, miss_send_len=None):
        # xid stays None until assigned; numeric fields default to 0.
        self.xid = xid
        self.flags = flags if flags is not None else 0
        self.miss_send_len = miss_send_len if miss_send_len is not None else 0

    def pack(self):
        """Serialize to the OpenFlow wire format; returns a byte string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.flags))
        packed.append(struct.pack("!H", self.miss_send_len))
        length = sum(len(x) for x in packed)
        packed[2] = struct.pack("!H", length) # back-patch real length
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Parse a get_config_reply from *reader*; asserts header fields."""
        obj = get_config_reply()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 8)
        _length = reader.read("!H")[0]
        orig_reader = reader
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        obj.flags = reader.read("!H")[0]
        obj.miss_send_len = reader.read("!H")[0]
        return obj

    def __eq__(self, other):
        if type(self) != type(other): return False
        return (self.xid == other.xid
                and self.flags == other.flags
                and self.miss_send_len == other.miss_send_len)

    def pretty_print(self, q):
        """Write a human-readable rendering to the pretty-printer *q*."""
        q.text("get_config_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                if self.xid is not None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("flags = ")
                q.text("%#x" % self.flags)
                q.text(","); q.breakable()
                q.text("miss_send_len = ")
                q.text("%#x" % self.miss_send_len)
            q.breakable()
        q.text('}')

message.subtypes[8] = get_config_reply
class get_config_request(message):
    """OFPT_GET_CONFIG_REQUEST (OpenFlow 1.3): ask the switch for its
    current configuration; carries only the common header."""
    version = 4
    type = 7

    def __init__(self, xid=None):
        # xid stays None until assigned by the connection layer.
        self.xid = xid

    def pack(self):
        """Serialize to the OpenFlow wire format; returns a byte string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        length = sum(len(x) for x in packed)
        packed[2] = struct.pack("!H", length) # back-patch real length
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Parse a get_config_request from *reader*; asserts header fields."""
        obj = get_config_request()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 7)
        _length = reader.read("!H")[0]
        orig_reader = reader
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        return obj

    def __eq__(self, other):
        if type(self) != type(other): return False
        return self.xid == other.xid

    def pretty_print(self, q):
        """Write a human-readable rendering to the pretty-printer *q*."""
        q.text("get_config_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                if self.xid is not None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
            q.breakable()
        q.text('}')

message.subtypes[7] = get_config_request
class group_mod(message):
    """OFPT_GROUP_MOD (OpenFlow 1.3) base class; concrete subclasses
    (group_add / group_modify / group_delete) register themselves in
    ``subtypes`` keyed by the 16-bit command field."""
    subtypes = {}
    version = 4
    type = 15

    def __init__(self, xid=None, command=None, group_type=None, group_id=None, buckets=None):
        # xid stays None until assigned; other fields default to 0 / empty.
        self.xid = xid
        self.command = command if command is not None else 0
        self.group_type = group_type if group_type is not None else 0
        self.group_id = group_id if group_id is not None else 0
        self.buckets = buckets if buckets is not None else []

    def pack(self):
        """Serialize to the OpenFlow wire format; returns a byte string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.command))
        packed.append(struct.pack("!B", self.group_type))
        packed.append('\x00' * 1)
        packed.append(struct.pack("!L", self.group_id))
        packed.append(loxi.generic_util.pack_list(self.buckets))
        length = sum(len(x) for x in packed)
        packed[2] = struct.pack("!H", length) # back-patch real length
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Dispatch on the command field (at byte offset 8) to the registered
        subclass, or parse a generic group_mod if none matches."""
        subtype, = reader.peek('!H', 8)
        subclass = group_mod.subtypes.get(subtype)
        if subclass:
            return subclass.unpack(reader)
        obj = group_mod()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 15)
        _length = reader.read("!H")[0]
        orig_reader = reader
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        obj.command = reader.read("!H")[0]
        obj.group_type = reader.read("!B")[0]
        reader.skip(1)
        obj.group_id = reader.read("!L")[0]
        obj.buckets = loxi.generic_util.unpack_list(reader, common.bucket.unpack)
        return obj

    def __eq__(self, other):
        if type(self) != type(other): return False
        return (self.xid == other.xid
                and self.command == other.command
                and self.group_type == other.group_type
                and self.group_id == other.group_id
                and self.buckets == other.buckets)

    def pretty_print(self, q):
        """Write a human-readable rendering to the pretty-printer *q*.
        (command is implied by the concrete subclass and not printed.)"""
        q.text("group_mod {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                if self.xid is not None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("group_type = ")
                q.text("%#x" % self.group_type)
                q.text(","); q.breakable()
                q.text("group_id = ")
                q.text("%#x" % self.group_id)
                q.text(","); q.breakable()
                q.text("buckets = ")
                q.pp(self.buckets)
            q.breakable()
        q.text('}')

message.subtypes[15] = group_mod
class group_add(group_mod):
    """OFPT_GROUP_MOD with command OFPGC_ADD (0): add a new group entry."""
    version = 4
    type = 15
    command = 0

    def __init__(self, xid=None, group_type=None, group_id=None, buckets=None):
        # xid stays None until assigned; other fields default to 0 / empty.
        self.xid = xid
        self.group_type = group_type if group_type is not None else 0
        self.group_id = group_id if group_id is not None else 0
        self.buckets = buckets if buckets is not None else []

    def pack(self):
        """Serialize to the OpenFlow wire format; returns a byte string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.command))
        packed.append(struct.pack("!B", self.group_type))
        packed.append('\x00' * 1)
        packed.append(struct.pack("!L", self.group_id))
        packed.append(loxi.generic_util.pack_list(self.buckets))
        length = sum(len(x) for x in packed)
        packed[2] = struct.pack("!H", length) # back-patch real length
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Parse a group_add from *reader*; asserts header and command."""
        obj = group_add()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 15)
        _length = reader.read("!H")[0]
        orig_reader = reader
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _command = reader.read("!H")[0]
        assert(_command == 0)
        obj.group_type = reader.read("!B")[0]
        reader.skip(1)
        obj.group_id = reader.read("!L")[0]
        obj.buckets = loxi.generic_util.unpack_list(reader, common.bucket.unpack)
        return obj

    def __eq__(self, other):
        if type(self) != type(other): return False
        return (self.xid == other.xid
                and self.group_type == other.group_type
                and self.group_id == other.group_id
                and self.buckets == other.buckets)

    def pretty_print(self, q):
        """Write a human-readable rendering to the pretty-printer *q*."""
        q.text("group_add {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                if self.xid is not None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("group_type = ")
                q.text("%#x" % self.group_type)
                q.text(","); q.breakable()
                q.text("group_id = ")
                q.text("%#x" % self.group_id)
                q.text(","); q.breakable()
                q.text("buckets = ")
                q.pp(self.buckets)
            q.breakable()
        q.text('}')

group_mod.subtypes[0] = group_add
class group_delete(group_mod):
    """OFPT_GROUP_MOD with command OFPGC_DELETE (2): remove a group entry."""
    version = 4
    type = 15
    command = 2

    def __init__(self, xid=None, group_type=None, group_id=None, buckets=None):
        # xid stays None until assigned; other fields default to 0 / empty.
        self.xid = xid
        self.group_type = group_type if group_type is not None else 0
        self.group_id = group_id if group_id is not None else 0
        self.buckets = buckets if buckets is not None else []

    def pack(self):
        """Serialize to the OpenFlow wire format; returns a byte string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.command))
        packed.append(struct.pack("!B", self.group_type))
        packed.append('\x00' * 1)
        packed.append(struct.pack("!L", self.group_id))
        packed.append(loxi.generic_util.pack_list(self.buckets))
        length = sum(len(x) for x in packed)
        packed[2] = struct.pack("!H", length) # back-patch real length
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Parse a group_delete from *reader*; asserts header and command."""
        obj = group_delete()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 15)
        _length = reader.read("!H")[0]
        orig_reader = reader
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _command = reader.read("!H")[0]
        assert(_command == 2)
        obj.group_type = reader.read("!B")[0]
        reader.skip(1)
        obj.group_id = reader.read("!L")[0]
        obj.buckets = loxi.generic_util.unpack_list(reader, common.bucket.unpack)
        return obj

    def __eq__(self, other):
        if type(self) != type(other): return False
        return (self.xid == other.xid
                and self.group_type == other.group_type
                and self.group_id == other.group_id
                and self.buckets == other.buckets)

    def pretty_print(self, q):
        """Write a human-readable rendering to the pretty-printer *q*."""
        q.text("group_delete {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                if self.xid is not None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("group_type = ")
                q.text("%#x" % self.group_type)
                q.text(","); q.breakable()
                q.text("group_id = ")
                q.text("%#x" % self.group_id)
                q.text(","); q.breakable()
                q.text("buckets = ")
                q.pp(self.buckets)
            q.breakable()
        q.text('}')

group_mod.subtypes[2] = group_delete
class group_desc_stats_reply(stats_reply):
    """OFPMP_GROUP_DESC stats reply (OpenFlow 1.3): list of group
    descriptions installed on the switch."""
    version = 4
    type = 19
    stats_type = 7

    def __init__(self, xid=None, flags=None, entries=None):
        # xid stays None until assigned; other fields default to 0 / empty.
        self.xid = xid
        self.flags = flags if flags is not None else 0
        self.entries = entries if entries is not None else []

    def pack(self):
        """Serialize to the OpenFlow wire format; returns a byte string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.stats_type))
        packed.append(struct.pack("!H", self.flags))
        packed.append('\x00' * 4)
        packed.append(loxi.generic_util.pack_list(self.entries))
        length = sum(len(x) for x in packed)
        packed[2] = struct.pack("!H", length) # back-patch real length
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Parse a group_desc_stats_reply; asserts header and stats_type."""
        obj = group_desc_stats_reply()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 19)
        _length = reader.read("!H")[0]
        orig_reader = reader
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _stats_type = reader.read("!H")[0]
        assert(_stats_type == 7)
        obj.flags = reader.read("!H")[0]
        reader.skip(4)
        obj.entries = loxi.generic_util.unpack_list(reader, common.group_desc_stats_entry.unpack)
        return obj

    def __eq__(self, other):
        if type(self) != type(other): return False
        return (self.xid == other.xid
                and self.flags == other.flags
                and self.entries == other.entries)

    def pretty_print(self, q):
        """Write a human-readable rendering to the pretty-printer *q*."""
        q.text("group_desc_stats_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                if self.xid is not None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("flags = ")
                q.text("%#x" % self.flags)
                q.text(","); q.breakable()
                q.text("entries = ")
                q.pp(self.entries)
            q.breakable()
        q.text('}')

stats_reply.subtypes[7] = group_desc_stats_reply
class group_desc_stats_request(stats_request):
    """OFPMP_GROUP_DESC stats request (OpenFlow 1.3): ask for the
    descriptions of all groups on the switch."""
    version = 4
    type = 18
    stats_type = 7

    def __init__(self, xid=None, flags=None):
        # xid stays None until assigned; flags defaults to 0.
        self.xid = xid
        self.flags = flags if flags is not None else 0

    def pack(self):
        """Serialize to the OpenFlow wire format; returns a byte string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.stats_type))
        packed.append(struct.pack("!H", self.flags))
        packed.append('\x00' * 4)
        length = sum(len(x) for x in packed)
        packed[2] = struct.pack("!H", length) # back-patch real length
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Parse a group_desc_stats_request; asserts header and stats_type."""
        obj = group_desc_stats_request()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 18)
        _length = reader.read("!H")[0]
        orig_reader = reader
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _stats_type = reader.read("!H")[0]
        assert(_stats_type == 7)
        obj.flags = reader.read("!H")[0]
        reader.skip(4)
        return obj

    def __eq__(self, other):
        if type(self) != type(other): return False
        return self.xid == other.xid and self.flags == other.flags

    def pretty_print(self, q):
        """Write a human-readable rendering to the pretty-printer *q*."""
        q.text("group_desc_stats_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                if self.xid is not None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("flags = ")
                q.text("%#x" % self.flags)
            q.breakable()
        q.text('}')

stats_request.subtypes[7] = group_desc_stats_request
class group_features_stats_reply(stats_reply):
    """OFPMP_GROUP_FEATURES stats reply (OpenFlow 1.3): bitmaps of the
    group types, capabilities, per-type max group counts and per-type
    supported action bitmaps the switch advertises."""
    version = 4
    type = 19
    stats_type = 8

    def __init__(self, xid=None, flags=None, types=None, capabilities=None, max_groups_all=None, max_groups_select=None, max_groups_indirect=None, max_groups_ff=None, actions_all=None, actions_select=None, actions_indirect=None, actions_ff=None):
        # xid stays None until assigned; all bitmap/count fields default to 0.
        self.xid = xid
        self.flags = flags if flags is not None else 0
        self.types = types if types is not None else 0
        self.capabilities = capabilities if capabilities is not None else 0
        self.max_groups_all = max_groups_all if max_groups_all is not None else 0
        self.max_groups_select = max_groups_select if max_groups_select is not None else 0
        self.max_groups_indirect = max_groups_indirect if max_groups_indirect is not None else 0
        self.max_groups_ff = max_groups_ff if max_groups_ff is not None else 0
        self.actions_all = actions_all if actions_all is not None else 0
        self.actions_select = actions_select if actions_select is not None else 0
        self.actions_indirect = actions_indirect if actions_indirect is not None else 0
        self.actions_ff = actions_ff if actions_ff is not None else 0

    def pack(self):
        """Serialize to the OpenFlow wire format; returns a byte string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.stats_type))
        packed.append(struct.pack("!H", self.flags))
        packed.append('\x00' * 4)
        packed.append(struct.pack("!L", self.types))
        packed.append(struct.pack("!L", self.capabilities))
        packed.append(struct.pack("!L", self.max_groups_all))
        packed.append(struct.pack("!L", self.max_groups_select))
        packed.append(struct.pack("!L", self.max_groups_indirect))
        packed.append(struct.pack("!L", self.max_groups_ff))
        packed.append(struct.pack("!L", self.actions_all))
        packed.append(struct.pack("!L", self.actions_select))
        packed.append(struct.pack("!L", self.actions_indirect))
        packed.append(struct.pack("!L", self.actions_ff))
        length = sum(len(x) for x in packed)
        packed[2] = struct.pack("!H", length) # back-patch real length
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Parse a group_features_stats_reply; asserts header and stats_type."""
        obj = group_features_stats_reply()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 19)
        _length = reader.read("!H")[0]
        orig_reader = reader
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _stats_type = reader.read("!H")[0]
        assert(_stats_type == 8)
        obj.flags = reader.read("!H")[0]
        reader.skip(4)
        obj.types = reader.read("!L")[0]
        obj.capabilities = reader.read("!L")[0]
        obj.max_groups_all = reader.read("!L")[0]
        obj.max_groups_select = reader.read("!L")[0]
        obj.max_groups_indirect = reader.read("!L")[0]
        obj.max_groups_ff = reader.read("!L")[0]
        obj.actions_all = reader.read("!L")[0]
        obj.actions_select = reader.read("!L")[0]
        obj.actions_indirect = reader.read("!L")[0]
        obj.actions_ff = reader.read("!L")[0]
        return obj

    def __eq__(self, other):
        if type(self) != type(other): return False
        return (self.xid == other.xid
                and self.flags == other.flags
                and self.types == other.types
                and self.capabilities == other.capabilities
                and self.max_groups_all == other.max_groups_all
                and self.max_groups_select == other.max_groups_select
                and self.max_groups_indirect == other.max_groups_indirect
                and self.max_groups_ff == other.max_groups_ff
                and self.actions_all == other.actions_all
                and self.actions_select == other.actions_select
                and self.actions_indirect == other.actions_indirect
                and self.actions_ff == other.actions_ff)

    def pretty_print(self, q):
        """Write a human-readable rendering to the pretty-printer *q*."""
        q.text("group_features_stats_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                if self.xid is not None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("flags = ")
                q.text("%#x" % self.flags)
                q.text(","); q.breakable()
                q.text("types = ")
                q.text("%#x" % self.types)
                q.text(","); q.breakable()
                q.text("capabilities = ")
                q.text("%#x" % self.capabilities)
                q.text(","); q.breakable()
                q.text("max_groups_all = ")
                q.text("%#x" % self.max_groups_all)
                q.text(","); q.breakable()
                q.text("max_groups_select = ")
                q.text("%#x" % self.max_groups_select)
                q.text(","); q.breakable()
                q.text("max_groups_indirect = ")
                q.text("%#x" % self.max_groups_indirect)
                q.text(","); q.breakable()
                q.text("max_groups_ff = ")
                q.text("%#x" % self.max_groups_ff)
                q.text(","); q.breakable()
                q.text("actions_all = ")
                q.text("%#x" % self.actions_all)
                q.text(","); q.breakable()
                q.text("actions_select = ")
                q.text("%#x" % self.actions_select)
                q.text(","); q.breakable()
                q.text("actions_indirect = ")
                q.text("%#x" % self.actions_indirect)
                q.text(","); q.breakable()
                q.text("actions_ff = ")
                q.text("%#x" % self.actions_ff)
            q.breakable()
        q.text('}')

stats_reply.subtypes[8] = group_features_stats_reply
class group_features_stats_request(stats_request):
    """OFPMP_GROUP_FEATURES stats request (OpenFlow 1.3): ask the switch
    for its group-table capabilities."""
    version = 4
    type = 18
    stats_type = 8

    def __init__(self, xid=None, flags=None):
        # xid stays None until assigned; flags defaults to 0.
        self.xid = xid
        self.flags = flags if flags is not None else 0

    def pack(self):
        """Serialize to the OpenFlow wire format; returns a byte string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.stats_type))
        packed.append(struct.pack("!H", self.flags))
        packed.append('\x00' * 4)
        length = sum(len(x) for x in packed)
        packed[2] = struct.pack("!H", length) # back-patch real length
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Parse a group_features_stats_request; asserts header fields."""
        obj = group_features_stats_request()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 18)
        _length = reader.read("!H")[0]
        orig_reader = reader
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _stats_type = reader.read("!H")[0]
        assert(_stats_type == 8)
        obj.flags = reader.read("!H")[0]
        reader.skip(4)
        return obj

    def __eq__(self, other):
        if type(self) != type(other): return False
        return self.xid == other.xid and self.flags == other.flags

    def pretty_print(self, q):
        """Write a human-readable rendering to the pretty-printer *q*."""
        q.text("group_features_stats_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                if self.xid is not None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("flags = ")
                q.text("%#x" % self.flags)
            q.breakable()
        q.text('}')

stats_request.subtypes[8] = group_features_stats_request
class group_mod_failed_error_msg(error_msg):
    """OFPET_GROUP_MOD_FAILED error (OpenFlow 1.3): a group_mod was
    rejected; ``data`` carries the offending request bytes."""
    version = 4
    type = 1
    err_type = 6

    def __init__(self, xid=None, code=None, data=None):
        # xid stays None until assigned; code defaults to 0, data to ''.
        self.xid = xid
        self.code = code if code is not None else 0
        self.data = data if data is not None else ''

    def pack(self):
        """Serialize to the OpenFlow wire format; returns a byte string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.err_type))
        packed.append(struct.pack("!H", self.code))
        packed.append(self.data)
        length = sum(len(x) for x in packed)
        packed[2] = struct.pack("!H", length) # back-patch real length
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Parse a group_mod_failed_error_msg; asserts header and err_type."""
        obj = group_mod_failed_error_msg()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 1)
        _length = reader.read("!H")[0]
        orig_reader = reader
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _err_type = reader.read("!H")[0]
        assert(_err_type == 6)
        obj.code = reader.read("!H")[0]
        obj.data = str(reader.read_all())
        return obj

    def __eq__(self, other):
        if type(self) != type(other): return False
        return (self.xid == other.xid
                and self.code == other.code
                and self.data == other.data)

    def pretty_print(self, q):
        """Write a human-readable rendering to the pretty-printer *q*."""
        q.text("group_mod_failed_error_msg {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                if self.xid is not None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("code = ")
                q.text("%#x" % self.code)
                q.text(","); q.breakable()
                q.text("data = ")
                q.pp(self.data)
            q.breakable()
        q.text('}')

error_msg.subtypes[6] = group_mod_failed_error_msg
class group_modify(group_mod):
    """OFPT_GROUP_MOD with command OFPGC_MODIFY (1): replace an existing
    group's type and bucket list."""
    version = 4
    type = 15
    command = 1

    def __init__(self, xid=None, group_type=None, group_id=None, buckets=None):
        # xid stays None until assigned; other fields default to 0 / empty.
        self.xid = xid
        self.group_type = group_type if group_type is not None else 0
        self.group_id = group_id if group_id is not None else 0
        self.buckets = buckets if buckets is not None else []

    def pack(self):
        """Serialize to the OpenFlow wire format; returns a byte string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.command))
        packed.append(struct.pack("!B", self.group_type))
        packed.append('\x00' * 1)
        packed.append(struct.pack("!L", self.group_id))
        packed.append(loxi.generic_util.pack_list(self.buckets))
        length = sum(len(x) for x in packed)
        packed[2] = struct.pack("!H", length) # back-patch real length
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Parse a group_modify from *reader*; asserts header and command."""
        obj = group_modify()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 15)
        _length = reader.read("!H")[0]
        orig_reader = reader
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _command = reader.read("!H")[0]
        assert(_command == 1)
        obj.group_type = reader.read("!B")[0]
        reader.skip(1)
        obj.group_id = reader.read("!L")[0]
        obj.buckets = loxi.generic_util.unpack_list(reader, common.bucket.unpack)
        return obj

    def __eq__(self, other):
        if type(self) != type(other): return False
        return (self.xid == other.xid
                and self.group_type == other.group_type
                and self.group_id == other.group_id
                and self.buckets == other.buckets)

    def pretty_print(self, q):
        """Write a human-readable rendering to the pretty-printer *q*."""
        q.text("group_modify {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                if self.xid is not None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("group_type = ")
                q.text("%#x" % self.group_type)
                q.text(","); q.breakable()
                q.text("group_id = ")
                q.text("%#x" % self.group_id)
                q.text(","); q.breakable()
                q.text("buckets = ")
                q.pp(self.buckets)
            q.breakable()
        q.text('}')

group_mod.subtypes[1] = group_modify
class group_stats_reply(stats_reply):
    """OFPMP_GROUP stats reply (OpenFlow 1.3): per-group counters."""
    version = 4
    type = 19
    stats_type = 6

    def __init__(self, xid=None, flags=None, entries=None):
        # xid stays None until assigned; other fields default to 0 / empty.
        self.xid = xid
        self.flags = flags if flags is not None else 0
        self.entries = entries if entries is not None else []

    def pack(self):
        """Serialize to the OpenFlow wire format; returns a byte string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.stats_type))
        packed.append(struct.pack("!H", self.flags))
        packed.append('\x00' * 4)
        packed.append(loxi.generic_util.pack_list(self.entries))
        length = sum(len(x) for x in packed)
        packed[2] = struct.pack("!H", length) # back-patch real length
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Parse a group_stats_reply; asserts header and stats_type."""
        obj = group_stats_reply()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 19)
        _length = reader.read("!H")[0]
        orig_reader = reader
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _stats_type = reader.read("!H")[0]
        assert(_stats_type == 6)
        obj.flags = reader.read("!H")[0]
        reader.skip(4)
        obj.entries = loxi.generic_util.unpack_list(reader, common.group_stats_entry.unpack)
        return obj

    def __eq__(self, other):
        if type(self) != type(other): return False
        return (self.xid == other.xid
                and self.flags == other.flags
                and self.entries == other.entries)

    def pretty_print(self, q):
        """Write a human-readable rendering to the pretty-printer *q*."""
        q.text("group_stats_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                if self.xid is not None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("flags = ")
                q.text("%#x" % self.flags)
                q.text(","); q.breakable()
                q.text("entries = ")
                q.pp(self.entries)
            q.breakable()
        q.text('}')

stats_reply.subtypes[6] = group_stats_reply
class group_stats_request(stats_request):
    """OFPMP_GROUP stats request (OpenFlow 1.3): request counters for
    ``group_id`` (or OFPG_ALL)."""
    version = 4
    type = 18
    stats_type = 6

    def __init__(self, xid=None, flags=None, group_id=None):
        # xid stays None until assigned; other fields default to 0.
        self.xid = xid
        self.flags = flags if flags is not None else 0
        self.group_id = group_id if group_id is not None else 0

    def pack(self):
        """Serialize to the OpenFlow wire format; returns a byte string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.stats_type))
        packed.append(struct.pack("!H", self.flags))
        packed.append('\x00' * 4)
        packed.append(struct.pack("!L", self.group_id))
        packed.append('\x00' * 4)
        length = sum(len(x) for x in packed)
        packed[2] = struct.pack("!H", length) # back-patch real length
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Parse a group_stats_request; asserts header and stats_type."""
        obj = group_stats_request()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 18)
        _length = reader.read("!H")[0]
        orig_reader = reader
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _stats_type = reader.read("!H")[0]
        assert(_stats_type == 6)
        obj.flags = reader.read("!H")[0]
        reader.skip(4)
        obj.group_id = reader.read("!L")[0]
        reader.skip(4)
        return obj

    def __eq__(self, other):
        if type(self) != type(other): return False
        return (self.xid == other.xid
                and self.flags == other.flags
                and self.group_id == other.group_id)

    def pretty_print(self, q):
        """Write a human-readable rendering to the pretty-printer *q*."""
        q.text("group_stats_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                if self.xid is not None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("flags = ")
                q.text("%#x" % self.flags)
                q.text(","); q.breakable()
                q.text("group_id = ")
                q.text("%#x" % self.group_id)
            q.breakable()
        q.text('}')

stats_request.subtypes[6] = group_stats_request
class hello(message):
    """OFPT_HELLO (OpenFlow 1.3): version negotiation message carrying an
    optional list of hello elements (e.g. version bitmaps)."""
    version = 4
    type = 0

    def __init__(self, xid=None, elements=None):
        # xid stays None until assigned; elements defaults to an empty list.
        self.xid = xid
        self.elements = elements if elements is not None else []

    def pack(self):
        """Serialize to the OpenFlow wire format; returns a byte string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(loxi.generic_util.pack_list(self.elements))
        length = sum(len(x) for x in packed)
        packed[2] = struct.pack("!H", length) # back-patch real length
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Parse a hello from *reader*; asserts the fixed header fields."""
        obj = hello()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 0)
        _length = reader.read("!H")[0]
        orig_reader = reader
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        obj.elements = loxi.generic_util.unpack_list(reader, common.hello_elem.unpack)
        return obj

    def __eq__(self, other):
        if type(self) != type(other): return False
        return self.xid == other.xid and self.elements == other.elements

    def pretty_print(self, q):
        """Write a human-readable rendering to the pretty-printer *q*."""
        q.text("hello {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                if self.xid is not None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("elements = ")
                q.pp(self.elements)
            q.breakable()
        q.text('}')

message.subtypes[0] = hello
class hello_failed_error_msg(error_msg):
    """OFPET_HELLO_FAILED error (OpenFlow 1.3): version negotiation
    failed; ``data`` may carry an ASCII explanation."""
    version = 4
    type = 1
    err_type = 0

    def __init__(self, xid=None, code=None, data=None):
        # xid stays None until assigned; code defaults to 0, data to ''.
        self.xid = xid
        self.code = code if code is not None else 0
        self.data = data if data is not None else ''

    def pack(self):
        """Serialize to the OpenFlow wire format; returns a byte string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.err_type))
        packed.append(struct.pack("!H", self.code))
        packed.append(self.data)
        length = sum(len(x) for x in packed)
        packed[2] = struct.pack("!H", length) # back-patch real length
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Parse a hello_failed_error_msg; asserts header and err_type."""
        obj = hello_failed_error_msg()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 1)
        _length = reader.read("!H")[0]
        orig_reader = reader
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _err_type = reader.read("!H")[0]
        assert(_err_type == 0)
        obj.code = reader.read("!H")[0]
        obj.data = str(reader.read_all())
        return obj

    def __eq__(self, other):
        if type(self) != type(other): return False
        return (self.xid == other.xid
                and self.code == other.code
                and self.data == other.data)

    def pretty_print(self, q):
        """Write a human-readable rendering to the pretty-printer *q*."""
        q.text("hello_failed_error_msg {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                if self.xid is not None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("code = ")
                q.text("%#x" % self.code)
                q.text(","); q.breakable()
                q.text("data = ")
                q.pp(self.data)
            q.breakable()
        q.text('}')

error_msg.subtypes[0] = hello_failed_error_msg
class meter_config_stats_reply(stats_reply):
    """Stats reply, stats_type 10 (meter config); carries a list of meter
    band entries decoded via meter_band.meter_band.unpack.
    """
    version = 4     # OpenFlow wire version
    type = 19       # stats/multipart reply message type
    stats_type = 10 # dispatch key within stats_reply.subtypes

    def __init__(self, xid=None, flags=None, entries=None):
        # Explicit defaults so a bare instance is immediately packable.
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if flags != None:
            self.flags = flags
        else:
            self.flags = 0
        if entries != None:
            self.entries = entries
        else:
            self.entries = []
        return

    def pack(self):
        """Serialize to a wire-format byte string (Python 2 str)."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.stats_type))
        packed.append(struct.pack("!H", self.flags))
        packed.append('\x00' * 4)  # pad
        packed.append(loxi.generic_util.pack_list(self.entries))
        # Backfill the 16-bit length field now that total size is known.
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Deserialize from a loxi reader; header mismatches assert."""
        obj = meter_config_stats_reply()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 19)
        _length = reader.read("!H")[0]
        # Bound reads to declared length; 4 header bytes already consumed.
        orig_reader = reader
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _stats_type = reader.read("!H")[0]
        assert(_stats_type == 10)
        obj.flags = reader.read("!H")[0]
        reader.skip(4)  # pad
        obj.entries = loxi.generic_util.unpack_list(reader, meter_band.meter_band.unpack)
        return obj

    def __eq__(self, other):
        """Field-wise equality; strict type match."""
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.flags != other.flags: return False
        if self.entries != other.entries: return False
        return True

    def pretty_print(self, q):
        """Render a human-readable dump via pretty-printer q."""
        q.text("meter_config_stats_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("flags = ");
                q.text("%#x" % self.flags)
                q.text(","); q.breakable()
                q.text("entries = ");
                q.pp(self.entries)
            q.breakable()
        q.text('}')

# Register under stats_type 10 so stats_reply.unpack dispatches here.
stats_reply.subtypes[10] = meter_config_stats_reply
class meter_config_stats_request(stats_request):
    """Stats request, stats_type 10 (meter config), keyed by meter_id."""
    version = 4     # OpenFlow wire version
    type = 18       # stats/multipart request message type
    stats_type = 10 # dispatch key within stats_request.subtypes

    def __init__(self, xid=None, flags=None, meter_id=None):
        # Explicit defaults so a bare instance is immediately packable.
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if flags != None:
            self.flags = flags
        else:
            self.flags = 0
        if meter_id != None:
            self.meter_id = meter_id
        else:
            self.meter_id = 0
        return

    def pack(self):
        """Serialize to a wire-format byte string (Python 2 str)."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.stats_type))
        packed.append(struct.pack("!H", self.flags))
        packed.append('\x00' * 4)  # pad
        packed.append(struct.pack("!L", self.meter_id))
        packed.append('\x00' * 4)  # pad
        # Backfill the 16-bit length field now that total size is known.
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Deserialize from a loxi reader; header mismatches assert."""
        obj = meter_config_stats_request()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 18)
        _length = reader.read("!H")[0]
        # Bound reads to declared length; 4 header bytes already consumed.
        orig_reader = reader
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _stats_type = reader.read("!H")[0]
        assert(_stats_type == 10)
        obj.flags = reader.read("!H")[0]
        reader.skip(4)  # pad
        obj.meter_id = reader.read("!L")[0]
        reader.skip(4)  # pad
        return obj

    def __eq__(self, other):
        """Field-wise equality; strict type match."""
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.flags != other.flags: return False
        if self.meter_id != other.meter_id: return False
        return True

    def pretty_print(self, q):
        """Render a human-readable dump via pretty-printer q."""
        q.text("meter_config_stats_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("flags = ");
                q.text("%#x" % self.flags)
                q.text(","); q.breakable()
                q.text("meter_id = ");
                q.text("%#x" % self.meter_id)
            q.breakable()
        q.text('}')

# Register under stats_type 10 so stats_request.unpack dispatches here.
stats_request.subtypes[10] = meter_config_stats_request
class meter_features_stats_reply(stats_reply):
    """Stats reply, stats_type 11 (meter features); body is a single
    common.meter_features struct.
    """
    version = 4     # OpenFlow wire version
    type = 19       # stats/multipart reply message type
    stats_type = 11 # dispatch key within stats_reply.subtypes

    def __init__(self, xid=None, flags=None, features=None):
        # Explicit defaults so a bare instance is immediately packable.
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if flags != None:
            self.flags = flags
        else:
            self.flags = 0
        if features != None:
            self.features = features
        else:
            self.features = common.meter_features()
        return

    def pack(self):
        """Serialize to a wire-format byte string (Python 2 str)."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.stats_type))
        packed.append(struct.pack("!H", self.flags))
        packed.append('\x00' * 4)  # pad
        packed.append(self.features.pack())
        # Backfill the 16-bit length field now that total size is known.
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Deserialize from a loxi reader; header mismatches assert."""
        obj = meter_features_stats_reply()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 19)
        _length = reader.read("!H")[0]
        # Bound reads to declared length; 4 header bytes already consumed.
        orig_reader = reader
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _stats_type = reader.read("!H")[0]
        assert(_stats_type == 11)
        obj.flags = reader.read("!H")[0]
        reader.skip(4)  # pad
        obj.features = common.meter_features.unpack(reader)
        return obj

    def __eq__(self, other):
        """Field-wise equality; strict type match."""
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.flags != other.flags: return False
        if self.features != other.features: return False
        return True

    def pretty_print(self, q):
        """Render a human-readable dump via pretty-printer q."""
        q.text("meter_features_stats_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("flags = ");
                q.text("%#x" % self.flags)
                q.text(","); q.breakable()
                q.text("features = ");
                q.pp(self.features)
            q.breakable()
        q.text('}')

# Register under stats_type 11 so stats_reply.unpack dispatches here.
stats_reply.subtypes[11] = meter_features_stats_reply
class meter_features_stats_request(stats_request):
    """Stats request, stats_type 11 (meter features); no body beyond flags."""
    version = 4     # OpenFlow wire version
    type = 18       # stats/multipart request message type
    stats_type = 11 # dispatch key within stats_request.subtypes

    def __init__(self, xid=None, flags=None):
        # Explicit defaults so a bare instance is immediately packable.
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if flags != None:
            self.flags = flags
        else:
            self.flags = 0
        return

    def pack(self):
        """Serialize to a wire-format byte string (Python 2 str)."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.stats_type))
        packed.append(struct.pack("!H", self.flags))
        packed.append('\x00' * 4)  # pad
        # Backfill the 16-bit length field now that total size is known.
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Deserialize from a loxi reader; header mismatches assert."""
        obj = meter_features_stats_request()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 18)
        _length = reader.read("!H")[0]
        # Bound reads to declared length; 4 header bytes already consumed.
        orig_reader = reader
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _stats_type = reader.read("!H")[0]
        assert(_stats_type == 11)
        obj.flags = reader.read("!H")[0]
        reader.skip(4)  # pad
        return obj

    def __eq__(self, other):
        """Field-wise equality; strict type match."""
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.flags != other.flags: return False
        return True

    def pretty_print(self, q):
        """Render a human-readable dump via pretty-printer q."""
        q.text("meter_features_stats_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("flags = ");
                q.text("%#x" % self.flags)
            q.breakable()
        q.text('}')

# Register under stats_type 11 so stats_request.unpack dispatches here.
stats_request.subtypes[11] = meter_features_stats_request
class meter_mod(message):
    """Meter modification message (wire type 29): command, flags, meter_id,
    and a list of meter bands decoded via meter_band.meter_band.unpack.
    """
    version = 4 # OpenFlow wire version
    type = 29   # dispatch key within message.subtypes

    def __init__(self, xid=None, command=None, flags=None, meter_id=None, meters=None):
        # Explicit defaults so a bare instance is immediately packable.
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if command != None:
            self.command = command
        else:
            self.command = 0
        if flags != None:
            self.flags = flags
        else:
            self.flags = 0
        if meter_id != None:
            self.meter_id = meter_id
        else:
            self.meter_id = 0
        if meters != None:
            self.meters = meters
        else:
            self.meters = []
        return

    def pack(self):
        """Serialize to a wire-format byte string (Python 2 str)."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.command))
        packed.append(struct.pack("!H", self.flags))
        packed.append(struct.pack("!L", self.meter_id))
        packed.append(loxi.generic_util.pack_list(self.meters))
        # Backfill the 16-bit length field now that total size is known.
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Deserialize from a loxi reader; header mismatches assert."""
        obj = meter_mod()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 29)
        _length = reader.read("!H")[0]
        # Bound reads to declared length; 4 header bytes already consumed.
        orig_reader = reader
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        obj.command = reader.read("!H")[0]
        obj.flags = reader.read("!H")[0]
        obj.meter_id = reader.read("!L")[0]
        obj.meters = loxi.generic_util.unpack_list(reader, meter_band.meter_band.unpack)
        return obj

    def __eq__(self, other):
        """Field-wise equality; strict type match."""
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.command != other.command: return False
        if self.flags != other.flags: return False
        if self.meter_id != other.meter_id: return False
        if self.meters != other.meters: return False
        return True

    def pretty_print(self, q):
        """Render a human-readable dump via pretty-printer q."""
        q.text("meter_mod {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("command = ");
                q.text("%#x" % self.command)
                q.text(","); q.breakable()
                q.text("flags = ");
                q.text("%#x" % self.flags)
                q.text(","); q.breakable()
                q.text("meter_id = ");
                q.text("%#x" % self.meter_id)
                q.text(","); q.breakable()
                q.text("meters = ");
                q.pp(self.meters)
            q.breakable()
        q.text('}')

# Register under wire type 29 so message.unpack dispatches here.
message.subtypes[29] = meter_mod
class meter_mod_failed_error_msg(error_msg):
    """Error message with err_type 12 (meter mod failed).

    Wire layout: version, type, length, xid, err_type, code, raw data payload.
    """
    version = 4    # OpenFlow wire version
    type = 1       # error message type
    err_type = 12  # dispatch key within error_msg.subtypes

    def __init__(self, xid=None, code=None, data=None):
        # Explicit defaults so a bare instance is immediately packable.
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if code != None:
            self.code = code
        else:
            self.code = 0
        if data != None:
            self.data = data
        else:
            self.data = ''
        return

    def pack(self):
        """Serialize to a wire-format byte string (Python 2 str)."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.err_type))
        packed.append(struct.pack("!H", self.code))
        packed.append(self.data)
        # Backfill the 16-bit length field now that total size is known.
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Deserialize from a loxi reader; header/err_type mismatches assert."""
        obj = meter_mod_failed_error_msg()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 1)
        _length = reader.read("!H")[0]
        # Bound reads to declared length; 4 header bytes already consumed.
        orig_reader = reader
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _err_type = reader.read("!H")[0]
        assert(_err_type == 12)
        obj.code = reader.read("!H")[0]
        # All remaining bytes are the opaque error payload.
        obj.data = str(reader.read_all())
        return obj

    def __eq__(self, other):
        """Field-wise equality; strict type match."""
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.code != other.code: return False
        if self.data != other.data: return False
        return True

    def pretty_print(self, q):
        """Render a human-readable dump via pretty-printer q."""
        q.text("meter_mod_failed_error_msg {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("code = ");
                q.text("%#x" % self.code)
                q.text(","); q.breakable()
                q.text("data = ");
                q.pp(self.data)
            q.breakable()
        q.text('}')

# Register under err_type 12 so error_msg.unpack dispatches here.
error_msg.subtypes[12] = meter_mod_failed_error_msg
class meter_stats_reply(stats_reply):
    """Stats reply, stats_type 9 (meter stats); carries a list of
    common.meter_stats entries.
    """
    version = 4    # OpenFlow wire version
    type = 19      # stats/multipart reply message type
    stats_type = 9 # dispatch key within stats_reply.subtypes

    def __init__(self, xid=None, flags=None, entries=None):
        # Explicit defaults so a bare instance is immediately packable.
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if flags != None:
            self.flags = flags
        else:
            self.flags = 0
        if entries != None:
            self.entries = entries
        else:
            self.entries = []
        return

    def pack(self):
        """Serialize to a wire-format byte string (Python 2 str)."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.stats_type))
        packed.append(struct.pack("!H", self.flags))
        packed.append('\x00' * 4)  # pad
        packed.append(loxi.generic_util.pack_list(self.entries))
        # Backfill the 16-bit length field now that total size is known.
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Deserialize from a loxi reader; header mismatches assert."""
        obj = meter_stats_reply()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 19)
        _length = reader.read("!H")[0]
        # Bound reads to declared length; 4 header bytes already consumed.
        orig_reader = reader
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _stats_type = reader.read("!H")[0]
        assert(_stats_type == 9)
        obj.flags = reader.read("!H")[0]
        reader.skip(4)  # pad
        obj.entries = loxi.generic_util.unpack_list(reader, common.meter_stats.unpack)
        return obj

    def __eq__(self, other):
        """Field-wise equality; strict type match."""
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.flags != other.flags: return False
        if self.entries != other.entries: return False
        return True

    def pretty_print(self, q):
        """Render a human-readable dump via pretty-printer q."""
        q.text("meter_stats_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("flags = ");
                q.text("%#x" % self.flags)
                q.text(","); q.breakable()
                q.text("entries = ");
                q.pp(self.entries)
            q.breakable()
        q.text('}')

# Register under stats_type 9 so stats_reply.unpack dispatches here.
stats_reply.subtypes[9] = meter_stats_reply
class meter_stats_request(stats_request):
    """Stats request, stats_type 9 (meter stats), keyed by meter_id."""
    version = 4    # OpenFlow wire version
    type = 18      # stats/multipart request message type
    stats_type = 9 # dispatch key within stats_request.subtypes

    def __init__(self, xid=None, flags=None, meter_id=None):
        # Explicit defaults so a bare instance is immediately packable.
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if flags != None:
            self.flags = flags
        else:
            self.flags = 0
        if meter_id != None:
            self.meter_id = meter_id
        else:
            self.meter_id = 0
        return

    def pack(self):
        """Serialize to a wire-format byte string (Python 2 str)."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.stats_type))
        packed.append(struct.pack("!H", self.flags))
        packed.append('\x00' * 4)  # pad
        packed.append(struct.pack("!L", self.meter_id))
        packed.append('\x00' * 4)  # pad
        # Backfill the 16-bit length field now that total size is known.
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Deserialize from a loxi reader; header mismatches assert."""
        obj = meter_stats_request()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 18)
        _length = reader.read("!H")[0]
        # Bound reads to declared length; 4 header bytes already consumed.
        orig_reader = reader
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _stats_type = reader.read("!H")[0]
        assert(_stats_type == 9)
        obj.flags = reader.read("!H")[0]
        reader.skip(4)  # pad
        obj.meter_id = reader.read("!L")[0]
        reader.skip(4)  # pad
        return obj

    def __eq__(self, other):
        """Field-wise equality; strict type match."""
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.flags != other.flags: return False
        if self.meter_id != other.meter_id: return False
        return True

    def pretty_print(self, q):
        """Render a human-readable dump via pretty-printer q."""
        q.text("meter_stats_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("flags = ");
                q.text("%#x" % self.flags)
                q.text(","); q.breakable()
                q.text("meter_id = ");
                q.text("%#x" % self.meter_id)
            q.breakable()
        q.text('}')

# Register under stats_type 9 so stats_request.unpack dispatches here.
stats_request.subtypes[9] = meter_stats_request
class nicira_header(experimenter):
    """Experimenter message for experimenter ID 8992 (Nicira); itself a
    dispatch point: subclasses register in nicira_header.subtypes by subtype.
    """
    subtypes = {}        # subtype -> concrete class, filled in by subclasses
    version = 4          # OpenFlow wire version
    type = 4             # experimenter message type
    experimenter = 8992  # dispatch key within experimenter.subtypes

    def __init__(self, xid=None, subtype=None):
        # Explicit defaults so a bare instance is immediately packable.
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if subtype != None:
            self.subtype = subtype
        else:
            self.subtype = 0
        return

    def pack(self):
        """Serialize to a wire-format byte string (Python 2 str)."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!L", self.experimenter))
        packed.append(struct.pack("!L", self.subtype))
        # Backfill the 16-bit length field now that total size is known.
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Deserialize; first try dispatching to a registered subtype class
        by peeking the 32-bit subtype at byte offset 12."""
        subtype, = reader.peek('!L', 12)
        subclass = nicira_header.subtypes.get(subtype)
        if subclass:
            return subclass.unpack(reader)
        # No registered subclass: decode as a generic nicira_header.
        obj = nicira_header()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 4)
        _length = reader.read("!H")[0]
        # Bound reads to declared length; 4 header bytes already consumed.
        orig_reader = reader
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _experimenter = reader.read("!L")[0]
        assert(_experimenter == 8992)
        obj.subtype = reader.read("!L")[0]
        return obj

    def __eq__(self, other):
        """Field-wise equality; strict type match."""
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.subtype != other.subtype: return False
        return True

    def pretty_print(self, q):
        """Render a human-readable dump via pretty-printer q."""
        q.text("nicira_header {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
            q.breakable()
        q.text('}')

# Register under experimenter ID 8992 so experimenter.unpack dispatches here.
experimenter.subtypes[8992] = nicira_header
class packet_in(message):
    """Packet-in message (wire type 10): buffer_id, total_len, reason,
    table_id, cookie, a match struct, 2 pad bytes, then the raw packet data.
    """
    version = 4 # OpenFlow wire version
    type = 10   # dispatch key within message.subtypes

    def __init__(self, xid=None, buffer_id=None, total_len=None, reason=None, table_id=None, cookie=None, match=None, data=None):
        # Explicit defaults so a bare instance is immediately packable.
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if buffer_id != None:
            self.buffer_id = buffer_id
        else:
            self.buffer_id = 0
        if total_len != None:
            self.total_len = total_len
        else:
            self.total_len = 0
        if reason != None:
            self.reason = reason
        else:
            self.reason = 0
        if table_id != None:
            self.table_id = table_id
        else:
            self.table_id = 0
        if cookie != None:
            self.cookie = cookie
        else:
            self.cookie = 0
        if match != None:
            self.match = match
        else:
            self.match = common.match()
        if data != None:
            self.data = data
        else:
            self.data = ''
        return

    def pack(self):
        """Serialize to a wire-format byte string (Python 2 str)."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!L", self.buffer_id))
        packed.append(struct.pack("!H", self.total_len))
        packed.append(struct.pack("!B", self.reason))
        packed.append(struct.pack("!B", self.table_id))
        packed.append(struct.pack("!Q", self.cookie))
        packed.append(self.match.pack())
        packed.append('\x00' * 2)  # pad between match and payload
        packed.append(self.data)
        # Backfill the 16-bit length field now that total size is known.
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Deserialize from a loxi reader; header mismatches assert."""
        obj = packet_in()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 10)
        _length = reader.read("!H")[0]
        # Bound reads to declared length; 4 header bytes already consumed.
        orig_reader = reader
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        obj.buffer_id = reader.read("!L")[0]
        obj.total_len = reader.read("!H")[0]
        obj.reason = reader.read("!B")[0]
        obj.table_id = reader.read("!B")[0]
        obj.cookie = reader.read("!Q")[0]
        obj.match = common.match.unpack(reader)
        reader.skip(2)  # pad
        # All remaining bytes are the packet payload.
        obj.data = str(reader.read_all())
        return obj

    def __eq__(self, other):
        """Field-wise equality; strict type match."""
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.buffer_id != other.buffer_id: return False
        if self.total_len != other.total_len: return False
        if self.reason != other.reason: return False
        if self.table_id != other.table_id: return False
        if self.cookie != other.cookie: return False
        if self.match != other.match: return False
        if self.data != other.data: return False
        return True

    def pretty_print(self, q):
        """Render a human-readable dump via pretty-printer q."""
        q.text("packet_in {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("buffer_id = ");
                q.text("%#x" % self.buffer_id)
                q.text(","); q.breakable()
                q.text("total_len = ");
                q.text("%#x" % self.total_len)
                q.text(","); q.breakable()
                q.text("reason = ");
                q.text("%#x" % self.reason)
                q.text(","); q.breakable()
                q.text("table_id = ");
                q.text("%#x" % self.table_id)
                q.text(","); q.breakable()
                q.text("cookie = ");
                q.text("%#x" % self.cookie)
                q.text(","); q.breakable()
                q.text("match = ");
                q.pp(self.match)
                q.text(","); q.breakable()
                q.text("data = ");
                q.pp(self.data)
            q.breakable()
        q.text('}')

# Register under wire type 10 so message.unpack dispatches here.
message.subtypes[10] = packet_in
class packet_out(message):
    """Packet-out message (wire type 13): buffer_id, in_port, a
    length-prefixed action list, then the raw packet data.
    """
    version = 4 # OpenFlow wire version
    type = 13   # dispatch key within message.subtypes

    def __init__(self, xid=None, buffer_id=None, in_port=None, actions=None, data=None):
        # Explicit defaults so a bare instance is immediately packable.
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if buffer_id != None:
            self.buffer_id = buffer_id
        else:
            self.buffer_id = 0
        if in_port != None:
            self.in_port = in_port
        else:
            self.in_port = 0
        if actions != None:
            self.actions = actions
        else:
            self.actions = []
        if data != None:
            self.data = data
        else:
            self.data = ''
        return

    def pack(self):
        """Serialize to a wire-format byte string (Python 2 str)."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!L", self.buffer_id))
        packed.append(util.pack_port_no(self.in_port))
        packed.append(struct.pack("!H", 0)) # placeholder for actions_len at index 6
        packed.append('\x00' * 6)  # pad
        packed.append(loxi.generic_util.pack_list(self.actions))
        # Backfill actions_len: packed[-1] is the action-list blob just added.
        packed[6] = struct.pack("!H", len(packed[-1]))
        packed.append(self.data)
        # Backfill the 16-bit length field now that total size is known.
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Deserialize from a loxi reader; header mismatches assert."""
        obj = packet_out()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 13)
        _length = reader.read("!H")[0]
        # Bound reads to declared length; 4 header bytes already consumed.
        orig_reader = reader
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        obj.buffer_id = reader.read("!L")[0]
        obj.in_port = util.unpack_port_no(reader)
        _actions_len = reader.read("!H")[0]
        reader.skip(6)  # pad
        # Action list occupies exactly _actions_len bytes; the rest is payload.
        obj.actions = loxi.generic_util.unpack_list(reader.slice(_actions_len), action.action.unpack)
        obj.data = str(reader.read_all())
        return obj

    def __eq__(self, other):
        """Field-wise equality; strict type match."""
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.buffer_id != other.buffer_id: return False
        if self.in_port != other.in_port: return False
        if self.actions != other.actions: return False
        if self.data != other.data: return False
        return True

    def pretty_print(self, q):
        """Render a human-readable dump via pretty-printer q."""
        q.text("packet_out {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("buffer_id = ");
                q.text("%#x" % self.buffer_id)
                q.text(","); q.breakable()
                q.text("in_port = ");
                q.text(util.pretty_port(self.in_port))
                q.text(","); q.breakable()
                q.text("actions = ");
                q.pp(self.actions)
                q.text(","); q.breakable()
                q.text("data = ");
                q.pp(self.data)
            q.breakable()
        q.text('}')

# Register under wire type 13 so message.unpack dispatches here.
message.subtypes[13] = packet_out
class port_desc_stats_reply(stats_reply):
    """Stats reply, stats_type 13 (port descriptions); carries a list of
    common.port_desc entries.
    """
    version = 4     # OpenFlow wire version
    type = 19       # stats/multipart reply message type
    stats_type = 13 # dispatch key within stats_reply.subtypes

    def __init__(self, xid=None, flags=None, entries=None):
        # Explicit defaults so a bare instance is immediately packable.
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if flags != None:
            self.flags = flags
        else:
            self.flags = 0
        if entries != None:
            self.entries = entries
        else:
            self.entries = []
        return

    def pack(self):
        """Serialize to a wire-format byte string (Python 2 str)."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.stats_type))
        packed.append(struct.pack("!H", self.flags))
        packed.append('\x00' * 4)  # pad
        packed.append(loxi.generic_util.pack_list(self.entries))
        # Backfill the 16-bit length field now that total size is known.
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Deserialize from a loxi reader; header mismatches assert."""
        obj = port_desc_stats_reply()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 19)
        _length = reader.read("!H")[0]
        # Bound reads to declared length; 4 header bytes already consumed.
        orig_reader = reader
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _stats_type = reader.read("!H")[0]
        assert(_stats_type == 13)
        obj.flags = reader.read("!H")[0]
        reader.skip(4)  # pad
        obj.entries = loxi.generic_util.unpack_list(reader, common.port_desc.unpack)
        return obj

    def __eq__(self, other):
        """Field-wise equality; strict type match."""
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.flags != other.flags: return False
        if self.entries != other.entries: return False
        return True

    def pretty_print(self, q):
        """Render a human-readable dump via pretty-printer q."""
        q.text("port_desc_stats_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("flags = ");
                q.text("%#x" % self.flags)
                q.text(","); q.breakable()
                q.text("entries = ");
                q.pp(self.entries)
            q.breakable()
        q.text('}')

# Register under stats_type 13 so stats_reply.unpack dispatches here.
stats_reply.subtypes[13] = port_desc_stats_reply
class port_desc_stats_request(stats_request):
    """Stats request, stats_type 13 (port descriptions); no body beyond flags."""
    version = 4     # OpenFlow wire version
    type = 18       # stats/multipart request message type
    stats_type = 13 # dispatch key within stats_request.subtypes

    def __init__(self, xid=None, flags=None):
        # Explicit defaults so a bare instance is immediately packable.
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if flags != None:
            self.flags = flags
        else:
            self.flags = 0
        return

    def pack(self):
        """Serialize to a wire-format byte string (Python 2 str)."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.stats_type))
        packed.append(struct.pack("!H", self.flags))
        packed.append('\x00' * 4)  # pad
        # Backfill the 16-bit length field now that total size is known.
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Deserialize from a loxi reader; header mismatches assert."""
        obj = port_desc_stats_request()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 18)
        _length = reader.read("!H")[0]
        # Bound reads to declared length; 4 header bytes already consumed.
        orig_reader = reader
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _stats_type = reader.read("!H")[0]
        assert(_stats_type == 13)
        obj.flags = reader.read("!H")[0]
        reader.skip(4)  # pad
        return obj

    def __eq__(self, other):
        """Field-wise equality; strict type match."""
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.flags != other.flags: return False
        return True

    def pretty_print(self, q):
        """Render a human-readable dump via pretty-printer q."""
        q.text("port_desc_stats_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("flags = ");
                q.text("%#x" % self.flags)
            q.breakable()
        q.text('}')

# Register under stats_type 13 so stats_request.unpack dispatches here.
stats_request.subtypes[13] = port_desc_stats_request
class port_mod(message):
    """Port modification message (wire type 16): port_no, 6-byte hw_addr,
    config/mask/advertise bitmask fields, with interleaved pad bytes.
    """
    version = 4 # OpenFlow wire version
    type = 16   # dispatch key within message.subtypes

    def __init__(self, xid=None, port_no=None, hw_addr=None, config=None, mask=None, advertise=None):
        # Explicit defaults so a bare instance is immediately packable.
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if port_no != None:
            self.port_no = port_no
        else:
            self.port_no = 0
        if hw_addr != None:
            self.hw_addr = hw_addr
        else:
            self.hw_addr = [0,0,0,0,0,0]
        if config != None:
            self.config = config
        else:
            self.config = 0
        if mask != None:
            self.mask = mask
        else:
            self.mask = 0
        if advertise != None:
            self.advertise = advertise
        else:
            self.advertise = 0
        return

    def pack(self):
        """Serialize to a wire-format byte string (Python 2 str)."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(util.pack_port_no(self.port_no))
        packed.append('\x00' * 4)  # pad
        packed.append(struct.pack("!6B", *self.hw_addr))  # 6-byte MAC
        packed.append('\x00' * 2)  # pad
        packed.append(struct.pack("!L", self.config))
        packed.append(struct.pack("!L", self.mask))
        packed.append(struct.pack("!L", self.advertise))
        packed.append('\x00' * 4)  # pad
        # Backfill the 16-bit length field now that total size is known.
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Deserialize from a loxi reader; header mismatches assert."""
        obj = port_mod()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 16)
        _length = reader.read("!H")[0]
        # Bound reads to declared length; 4 header bytes already consumed.
        orig_reader = reader
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        obj.port_no = util.unpack_port_no(reader)
        reader.skip(4)  # pad
        obj.hw_addr = list(reader.read('!6B'))  # 6-byte MAC as int list
        reader.skip(2)  # pad
        obj.config = reader.read("!L")[0]
        obj.mask = reader.read("!L")[0]
        obj.advertise = reader.read("!L")[0]
        reader.skip(4)  # pad
        return obj

    def __eq__(self, other):
        """Field-wise equality; strict type match."""
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.port_no != other.port_no: return False
        if self.hw_addr != other.hw_addr: return False
        if self.config != other.config: return False
        if self.mask != other.mask: return False
        if self.advertise != other.advertise: return False
        return True

    def pretty_print(self, q):
        """Render a human-readable dump via pretty-printer q."""
        q.text("port_mod {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("port_no = ");
                q.text(util.pretty_port(self.port_no))
                q.text(","); q.breakable()
                q.text("hw_addr = ");
                q.text(util.pretty_mac(self.hw_addr))
                q.text(","); q.breakable()
                q.text("config = ");
                q.text("%#x" % self.config)
                q.text(","); q.breakable()
                q.text("mask = ");
                q.text("%#x" % self.mask)
                q.text(","); q.breakable()
                q.text("advertise = ");
                q.text("%#x" % self.advertise)
            q.breakable()
        q.text('}')

# Register under wire type 16 so message.unpack dispatches here.
message.subtypes[16] = port_mod
class port_mod_failed_error_msg(error_msg):
    """Error message with err_type 7 (port mod failed).

    Wire layout: version, type, length, xid, err_type, code, raw data payload.
    """
    version = 4   # OpenFlow wire version
    type = 1      # error message type
    err_type = 7  # dispatch key within error_msg.subtypes

    def __init__(self, xid=None, code=None, data=None):
        # Explicit defaults so a bare instance is immediately packable.
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if code != None:
            self.code = code
        else:
            self.code = 0
        if data != None:
            self.data = data
        else:
            self.data = ''
        return

    def pack(self):
        """Serialize to a wire-format byte string (Python 2 str)."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.err_type))
        packed.append(struct.pack("!H", self.code))
        packed.append(self.data)
        # Backfill the 16-bit length field now that total size is known.
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Deserialize from a loxi reader; header/err_type mismatches assert."""
        obj = port_mod_failed_error_msg()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 1)
        _length = reader.read("!H")[0]
        # Bound reads to declared length; 4 header bytes already consumed.
        orig_reader = reader
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _err_type = reader.read("!H")[0]
        assert(_err_type == 7)
        obj.code = reader.read("!H")[0]
        # All remaining bytes are the opaque error payload.
        obj.data = str(reader.read_all())
        return obj

    def __eq__(self, other):
        """Field-wise equality; strict type match."""
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.code != other.code: return False
        if self.data != other.data: return False
        return True

    def pretty_print(self, q):
        """Render a human-readable dump via pretty-printer q."""
        q.text("port_mod_failed_error_msg {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("code = ");
                q.text("%#x" % self.code)
                q.text(","); q.breakable()
                q.text("data = ");
                q.pp(self.data)
            q.breakable()
        q.text('}')

# Register under err_type 7 so error_msg.unpack dispatches here.
error_msg.subtypes[7] = port_mod_failed_error_msg
class port_stats_reply(stats_reply):
    """Stats reply, stats_type 4 (port stats); carries a list of
    common.port_stats_entry entries.
    """
    version = 4    # OpenFlow wire version
    type = 19      # stats/multipart reply message type
    stats_type = 4 # dispatch key within stats_reply.subtypes

    def __init__(self, xid=None, flags=None, entries=None):
        # Explicit defaults so a bare instance is immediately packable.
        if xid != None:
            self.xid = xid
        else:
            self.xid = None
        if flags != None:
            self.flags = flags
        else:
            self.flags = 0
        if entries != None:
            self.entries = entries
        else:
            self.entries = []
        return

    def pack(self):
        """Serialize to a wire-format byte string (Python 2 str)."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.stats_type))
        packed.append(struct.pack("!H", self.flags))
        packed.append('\x00' * 4)  # pad
        packed.append(loxi.generic_util.pack_list(self.entries))
        # Backfill the 16-bit length field now that total size is known.
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Deserialize from a loxi reader; header mismatches assert."""
        obj = port_stats_reply()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 19)
        _length = reader.read("!H")[0]
        # Bound reads to declared length; 4 header bytes already consumed.
        orig_reader = reader
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _stats_type = reader.read("!H")[0]
        assert(_stats_type == 4)
        obj.flags = reader.read("!H")[0]
        reader.skip(4)  # pad
        obj.entries = loxi.generic_util.unpack_list(reader, common.port_stats_entry.unpack)
        return obj

    def __eq__(self, other):
        """Field-wise equality; strict type match."""
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.flags != other.flags: return False
        if self.entries != other.entries: return False
        return True

    def pretty_print(self, q):
        """Render a human-readable dump via pretty-printer q."""
        q.text("port_stats_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid != None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("flags = ");
                q.text("%#x" % self.flags)
                q.text(","); q.breakable()
                q.text("entries = ");
                q.pp(self.entries)
            q.breakable()
        q.text('}')

# Register under stats_type 4 so stats_reply.unpack dispatches here.
stats_reply.subtypes[4] = port_stats_reply
class port_stats_request(stats_request):
    """OFPT_MULTIPART_REQUEST for per-port counters (stats type 4).

    Attributes:
        xid      -- transaction id, or None if unassigned
        flags    -- multipart request flags (uint16)
        port_no  -- port to query, or OFPP_ANY; defaults to 0
    """
    version = 4
    type = 18
    stats_type = 4

    def __init__(self, xid=None, flags=None, port_no=None):
        # PEP 8: compare against None with `is`/`is not`, not equality.
        if xid is not None:
            self.xid = xid
        else:
            self.xid = None
        if flags is not None:
            self.flags = flags
        else:
            self.flags = 0
        if port_no is not None:
            self.port_no = port_no
        else:
            self.port_no = 0
        return

    def pack(self):
        """Serialize this message and return the wire-format string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.stats_type))
        packed.append(struct.pack("!H", self.flags))
        packed.append('\x00' * 4)  # pad
        packed.append(util.pack_port_no(self.port_no))
        packed.append('\x00' * 4)  # pad
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Deserialize from a loxi reader positioned at the version byte."""
        obj = port_stats_request()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 18)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Remaining payload = declared length minus the 4 header bytes read.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _stats_type = reader.read("!H")[0]
        assert(_stats_type == 4)
        obj.flags = reader.read("!H")[0]
        reader.skip(4)
        obj.port_no = util.unpack_port_no(reader)
        reader.skip(4)
        return obj

    def __eq__(self, other):
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.flags != other.flags: return False
        if self.port_no != other.port_no: return False
        return True

    def pretty_print(self, q):
        """Render a human-readable form of this message via pretty-printer q."""
        q.text("port_stats_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid is not None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("flags = ");
                q.text("%#x" % self.flags)
                q.text(","); q.breakable()
                q.text("port_no = ");
                q.text(util.pretty_port(self.port_no))
            q.breakable()
        q.text('}')

stats_request.subtypes[4] = port_stats_request
class port_status(message):
    """OFPT_PORT_STATUS (type 12): asynchronous port state-change notification.

    Attributes:
        xid    -- transaction id, or None if unassigned
        reason -- OFPPR_* reason code (uint8)
        desc   -- common.port_desc describing the affected port
    """
    version = 4
    type = 12

    def __init__(self, xid=None, reason=None, desc=None):
        # PEP 8: compare against None with `is`/`is not`, not equality.
        if xid is not None:
            self.xid = xid
        else:
            self.xid = None
        if reason is not None:
            self.reason = reason
        else:
            self.reason = 0
        if desc is not None:
            self.desc = desc
        else:
            self.desc = common.port_desc()
        return

    def pack(self):
        """Serialize this message and return the wire-format string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!B", self.reason))
        packed.append('\x00' * 7)  # pad
        packed.append(self.desc.pack())
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Deserialize from a loxi reader positioned at the version byte."""
        obj = port_status()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 12)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Remaining payload = declared length minus the 4 header bytes read.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        obj.reason = reader.read("!B")[0]
        reader.skip(7)
        obj.desc = common.port_desc.unpack(reader)
        return obj

    def __eq__(self, other):
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.reason != other.reason: return False
        if self.desc != other.desc: return False
        return True

    def pretty_print(self, q):
        """Render a human-readable form of this message via pretty-printer q."""
        q.text("port_status {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid is not None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("reason = ");
                q.text("%#x" % self.reason)
                q.text(","); q.breakable()
                q.text("desc = ");
                q.pp(self.desc)
            q.breakable()
        q.text('}')

message.subtypes[12] = port_status
class queue_get_config_reply(message):
    """OFPT_QUEUE_GET_CONFIG_REPLY (type 23): queue configuration of a port.

    Attributes:
        xid    -- transaction id, or None if unassigned
        port   -- port the queues are attached to
        queues -- list of common.packet_queue
    """
    version = 4
    type = 23

    def __init__(self, xid=None, port=None, queues=None):
        # PEP 8: compare against None with `is`/`is not`, not equality.
        if xid is not None:
            self.xid = xid
        else:
            self.xid = None
        if port is not None:
            self.port = port
        else:
            self.port = 0
        if queues is not None:
            self.queues = queues
        else:
            self.queues = []
        return

    def pack(self):
        """Serialize this message and return the wire-format string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(util.pack_port_no(self.port))
        packed.append('\x00' * 4)  # pad
        packed.append(loxi.generic_util.pack_list(self.queues))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Deserialize from a loxi reader positioned at the version byte."""
        obj = queue_get_config_reply()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 23)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Remaining payload = declared length minus the 4 header bytes read.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        obj.port = util.unpack_port_no(reader)
        reader.skip(4)
        obj.queues = loxi.generic_util.unpack_list(reader, common.packet_queue.unpack)
        return obj

    def __eq__(self, other):
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.port != other.port: return False
        if self.queues != other.queues: return False
        return True

    def pretty_print(self, q):
        """Render a human-readable form of this message via pretty-printer q."""
        q.text("queue_get_config_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid is not None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("port = ");
                q.text(util.pretty_port(self.port))
                q.text(","); q.breakable()
                q.text("queues = ");
                q.pp(self.queues)
            q.breakable()
        q.text('}')

message.subtypes[23] = queue_get_config_reply
class queue_get_config_request(message):
    """OFPT_QUEUE_GET_CONFIG_REQUEST (type 22): ask for a port's queue config.

    Attributes:
        xid  -- transaction id, or None if unassigned
        port -- port to query; defaults to 0
    """
    version = 4
    type = 22

    def __init__(self, xid=None, port=None):
        # PEP 8: compare against None with `is`/`is not`, not equality.
        if xid is not None:
            self.xid = xid
        else:
            self.xid = None
        if port is not None:
            self.port = port
        else:
            self.port = 0
        return

    def pack(self):
        """Serialize this message and return the wire-format string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(util.pack_port_no(self.port))
        packed.append('\x00' * 4)  # pad
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Deserialize from a loxi reader positioned at the version byte."""
        obj = queue_get_config_request()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 22)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Remaining payload = declared length minus the 4 header bytes read.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        obj.port = util.unpack_port_no(reader)
        reader.skip(4)
        return obj

    def __eq__(self, other):
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.port != other.port: return False
        return True

    def pretty_print(self, q):
        """Render a human-readable form of this message via pretty-printer q."""
        q.text("queue_get_config_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid is not None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("port = ");
                q.text(util.pretty_port(self.port))
            q.breakable()
        q.text('}')

message.subtypes[22] = queue_get_config_request
class queue_op_failed_error_msg(error_msg):
    """OFPT_ERROR with err_type OFPET_QUEUE_OP_FAILED (9).

    Attributes:
        xid  -- transaction id, or None if unassigned
        code -- error code (uint16)
        data -- raw error payload string
    """
    version = 4
    type = 1
    err_type = 9

    def __init__(self, xid=None, code=None, data=None):
        # PEP 8: compare against None with `is`/`is not`, not equality.
        if xid is not None:
            self.xid = xid
        else:
            self.xid = None
        if code is not None:
            self.code = code
        else:
            self.code = 0
        if data is not None:
            self.data = data
        else:
            self.data = ''
        return

    def pack(self):
        """Serialize this message and return the wire-format string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.err_type))
        packed.append(struct.pack("!H", self.code))
        packed.append(self.data)
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Deserialize from a loxi reader positioned at the version byte."""
        obj = queue_op_failed_error_msg()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 1)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Remaining payload = declared length minus the 4 header bytes read.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _err_type = reader.read("!H")[0]
        assert(_err_type == 9)
        obj.code = reader.read("!H")[0]
        obj.data = str(reader.read_all())
        return obj

    def __eq__(self, other):
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.code != other.code: return False
        if self.data != other.data: return False
        return True

    def pretty_print(self, q):
        """Render a human-readable form of this message via pretty-printer q."""
        q.text("queue_op_failed_error_msg {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid is not None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("code = ");
                q.text("%#x" % self.code)
                q.text(","); q.breakable()
                q.text("data = ");
                q.pp(self.data)
            q.breakable()
        q.text('}')

error_msg.subtypes[9] = queue_op_failed_error_msg
class queue_stats_reply(stats_reply):
    """OFPT_MULTIPART_REPLY carrying per-queue counters (stats type 5).

    Attributes:
        xid      -- transaction id, or None if unassigned
        flags    -- multipart reply flags (uint16)
        entries  -- list of common.queue_stats_entry
    """
    version = 4
    type = 19
    stats_type = 5

    def __init__(self, xid=None, flags=None, entries=None):
        # PEP 8: compare against None with `is`/`is not`, not equality.
        if xid is not None:
            self.xid = xid
        else:
            self.xid = None
        if flags is not None:
            self.flags = flags
        else:
            self.flags = 0
        if entries is not None:
            self.entries = entries
        else:
            self.entries = []
        return

    def pack(self):
        """Serialize this message and return the wire-format string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.stats_type))
        packed.append(struct.pack("!H", self.flags))
        packed.append('\x00' * 4)  # pad
        packed.append(loxi.generic_util.pack_list(self.entries))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Deserialize from a loxi reader positioned at the version byte."""
        obj = queue_stats_reply()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 19)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Remaining payload = declared length minus the 4 header bytes read.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _stats_type = reader.read("!H")[0]
        assert(_stats_type == 5)
        obj.flags = reader.read("!H")[0]
        reader.skip(4)
        obj.entries = loxi.generic_util.unpack_list(reader, common.queue_stats_entry.unpack)
        return obj

    def __eq__(self, other):
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.flags != other.flags: return False
        if self.entries != other.entries: return False
        return True

    def pretty_print(self, q):
        """Render a human-readable form of this message via pretty-printer q."""
        q.text("queue_stats_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid is not None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("flags = ");
                q.text("%#x" % self.flags)
                q.text(","); q.breakable()
                q.text("entries = ");
                q.pp(self.entries)
            q.breakable()
        q.text('}')

stats_reply.subtypes[5] = queue_stats_reply
class queue_stats_request(stats_request):
    """OFPT_MULTIPART_REQUEST for queue counters (stats type 5).

    Attributes:
        xid      -- transaction id, or None if unassigned
        flags    -- multipart request flags (uint16)
        port_no  -- port to query (or OFPP_ANY); defaults to 0
        queue_id -- queue to query (or OFPQ_ALL); defaults to 0
    """
    version = 4
    type = 18
    stats_type = 5

    def __init__(self, xid=None, flags=None, port_no=None, queue_id=None):
        # PEP 8: compare against None with `is`/`is not`, not equality.
        if xid is not None:
            self.xid = xid
        else:
            self.xid = None
        if flags is not None:
            self.flags = flags
        else:
            self.flags = 0
        if port_no is not None:
            self.port_no = port_no
        else:
            self.port_no = 0
        if queue_id is not None:
            self.queue_id = queue_id
        else:
            self.queue_id = 0
        return

    def pack(self):
        """Serialize this message and return the wire-format string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.stats_type))
        packed.append(struct.pack("!H", self.flags))
        packed.append('\x00' * 4)  # pad
        packed.append(util.pack_port_no(self.port_no))
        packed.append(struct.pack("!L", self.queue_id))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Deserialize from a loxi reader positioned at the version byte."""
        obj = queue_stats_request()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 18)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Remaining payload = declared length minus the 4 header bytes read.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _stats_type = reader.read("!H")[0]
        assert(_stats_type == 5)
        obj.flags = reader.read("!H")[0]
        reader.skip(4)
        obj.port_no = util.unpack_port_no(reader)
        obj.queue_id = reader.read("!L")[0]
        return obj

    def __eq__(self, other):
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.flags != other.flags: return False
        if self.port_no != other.port_no: return False
        if self.queue_id != other.queue_id: return False
        return True

    def pretty_print(self, q):
        """Render a human-readable form of this message via pretty-printer q."""
        q.text("queue_stats_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid is not None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("flags = ");
                q.text("%#x" % self.flags)
                q.text(","); q.breakable()
                q.text("port_no = ");
                q.text(util.pretty_port(self.port_no))
                q.text(","); q.breakable()
                q.text("queue_id = ");
                q.text("%#x" % self.queue_id)
            q.breakable()
        q.text('}')

stats_request.subtypes[5] = queue_stats_request
class role_reply(message):
    """OFPT_ROLE_REPLY (type 25): switch's answer to a controller role request.

    Attributes:
        xid           -- transaction id, or None if unassigned
        role          -- OFPCR_ROLE_* value (uint32)
        generation_id -- master election generation id (uint64)
    """
    version = 4
    type = 25

    def __init__(self, xid=None, role=None, generation_id=None):
        # PEP 8: compare against None with `is`/`is not`, not equality.
        if xid is not None:
            self.xid = xid
        else:
            self.xid = None
        if role is not None:
            self.role = role
        else:
            self.role = 0
        if generation_id is not None:
            self.generation_id = generation_id
        else:
            self.generation_id = 0
        return

    def pack(self):
        """Serialize this message and return the wire-format string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!L", self.role))
        packed.append('\x00' * 4)  # pad
        packed.append(struct.pack("!Q", self.generation_id))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Deserialize from a loxi reader positioned at the version byte."""
        obj = role_reply()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 25)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Remaining payload = declared length minus the 4 header bytes read.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        obj.role = reader.read("!L")[0]
        reader.skip(4)
        obj.generation_id = reader.read("!Q")[0]
        return obj

    def __eq__(self, other):
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.role != other.role: return False
        if self.generation_id != other.generation_id: return False
        return True

    def pretty_print(self, q):
        """Render a human-readable form of this message via pretty-printer q."""
        q.text("role_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid is not None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("role = ");
                q.text("%#x" % self.role)
                q.text(","); q.breakable()
                q.text("generation_id = ");
                q.text("%#x" % self.generation_id)
            q.breakable()
        q.text('}')

message.subtypes[25] = role_reply
class role_request(message):
    """OFPT_ROLE_REQUEST (type 24): controller asks to change/query its role.

    Attributes:
        xid           -- transaction id, or None if unassigned
        role          -- OFPCR_ROLE_* value (uint32)
        generation_id -- master election generation id (uint64)
    """
    version = 4
    type = 24

    def __init__(self, xid=None, role=None, generation_id=None):
        # PEP 8: compare against None with `is`/`is not`, not equality.
        if xid is not None:
            self.xid = xid
        else:
            self.xid = None
        if role is not None:
            self.role = role
        else:
            self.role = 0
        if generation_id is not None:
            self.generation_id = generation_id
        else:
            self.generation_id = 0
        return

    def pack(self):
        """Serialize this message and return the wire-format string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!L", self.role))
        packed.append('\x00' * 4)  # pad
        packed.append(struct.pack("!Q", self.generation_id))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Deserialize from a loxi reader positioned at the version byte."""
        obj = role_request()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 24)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Remaining payload = declared length minus the 4 header bytes read.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        obj.role = reader.read("!L")[0]
        reader.skip(4)
        obj.generation_id = reader.read("!Q")[0]
        return obj

    def __eq__(self, other):
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.role != other.role: return False
        if self.generation_id != other.generation_id: return False
        return True

    def pretty_print(self, q):
        """Render a human-readable form of this message via pretty-printer q."""
        q.text("role_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid is not None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("role = ");
                q.text("%#x" % self.role)
                q.text(","); q.breakable()
                q.text("generation_id = ");
                q.text("%#x" % self.generation_id)
            q.breakable()
        q.text('}')

message.subtypes[24] = role_request
class role_request_failed_error_msg(error_msg):
    """OFPT_ERROR with err_type OFPET_ROLE_REQUEST_FAILED (11).

    Attributes:
        xid  -- transaction id, or None if unassigned
        code -- error code (uint16)
        data -- raw error payload string
    """
    version = 4
    type = 1
    err_type = 11

    def __init__(self, xid=None, code=None, data=None):
        # PEP 8: compare against None with `is`/`is not`, not equality.
        if xid is not None:
            self.xid = xid
        else:
            self.xid = None
        if code is not None:
            self.code = code
        else:
            self.code = 0
        if data is not None:
            self.data = data
        else:
            self.data = ''
        return

    def pack(self):
        """Serialize this message and return the wire-format string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.err_type))
        packed.append(struct.pack("!H", self.code))
        packed.append(self.data)
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Deserialize from a loxi reader positioned at the version byte."""
        obj = role_request_failed_error_msg()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 1)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Remaining payload = declared length minus the 4 header bytes read.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _err_type = reader.read("!H")[0]
        assert(_err_type == 11)
        obj.code = reader.read("!H")[0]
        obj.data = str(reader.read_all())
        return obj

    def __eq__(self, other):
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.code != other.code: return False
        if self.data != other.data: return False
        return True

    def pretty_print(self, q):
        """Render a human-readable form of this message via pretty-printer q."""
        q.text("role_request_failed_error_msg {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid is not None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("code = ");
                q.text("%#x" % self.code)
                q.text(","); q.breakable()
                q.text("data = ");
                q.pp(self.data)
            q.breakable()
        q.text('}')

error_msg.subtypes[11] = role_request_failed_error_msg
class set_config(message):
    """OFPT_SET_CONFIG (type 9): set switch configuration flags.

    Attributes:
        xid           -- transaction id, or None if unassigned
        flags         -- OFPC_* config flags (uint16)
        miss_send_len -- max bytes of packet sent to controller on table miss
    """
    version = 4
    type = 9

    def __init__(self, xid=None, flags=None, miss_send_len=None):
        # PEP 8: compare against None with `is`/`is not`, not equality.
        if xid is not None:
            self.xid = xid
        else:
            self.xid = None
        if flags is not None:
            self.flags = flags
        else:
            self.flags = 0
        if miss_send_len is not None:
            self.miss_send_len = miss_send_len
        else:
            self.miss_send_len = 0
        return

    def pack(self):
        """Serialize this message and return the wire-format string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.flags))
        packed.append(struct.pack("!H", self.miss_send_len))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Deserialize from a loxi reader positioned at the version byte."""
        obj = set_config()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 9)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Remaining payload = declared length minus the 4 header bytes read.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        obj.flags = reader.read("!H")[0]
        obj.miss_send_len = reader.read("!H")[0]
        return obj

    def __eq__(self, other):
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.flags != other.flags: return False
        if self.miss_send_len != other.miss_send_len: return False
        return True

    def pretty_print(self, q):
        """Render a human-readable form of this message via pretty-printer q."""
        q.text("set_config {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid is not None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("flags = ");
                q.text("%#x" % self.flags)
                q.text(","); q.breakable()
                q.text("miss_send_len = ");
                q.text("%#x" % self.miss_send_len)
            q.breakable()
        q.text('}')

message.subtypes[9] = set_config
class switch_config_failed_error_msg(error_msg):
    """OFPT_ERROR with err_type OFPET_SWITCH_CONFIG_FAILED (10).

    Attributes:
        xid  -- transaction id, or None if unassigned
        code -- error code (uint16)
        data -- raw error payload string
    """
    version = 4
    type = 1
    err_type = 10

    def __init__(self, xid=None, code=None, data=None):
        # PEP 8: compare against None with `is`/`is not`, not equality.
        if xid is not None:
            self.xid = xid
        else:
            self.xid = None
        if code is not None:
            self.code = code
        else:
            self.code = 0
        if data is not None:
            self.data = data
        else:
            self.data = ''
        return

    def pack(self):
        """Serialize this message and return the wire-format string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.err_type))
        packed.append(struct.pack("!H", self.code))
        packed.append(self.data)
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Deserialize from a loxi reader positioned at the version byte."""
        obj = switch_config_failed_error_msg()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 1)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Remaining payload = declared length minus the 4 header bytes read.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _err_type = reader.read("!H")[0]
        assert(_err_type == 10)
        obj.code = reader.read("!H")[0]
        obj.data = str(reader.read_all())
        return obj

    def __eq__(self, other):
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.code != other.code: return False
        if self.data != other.data: return False
        return True

    def pretty_print(self, q):
        """Render a human-readable form of this message via pretty-printer q."""
        q.text("switch_config_failed_error_msg {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid is not None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("code = ");
                q.text("%#x" % self.code)
                q.text(","); q.breakable()
                q.text("data = ");
                q.pp(self.data)
            q.breakable()
        q.text('}')

error_msg.subtypes[10] = switch_config_failed_error_msg
class table_features_failed_error_msg(error_msg):
    """OFPT_ERROR with err_type OFPET_TABLE_FEATURES_FAILED (13).

    Attributes:
        xid  -- transaction id, or None if unassigned
        code -- error code (uint16)
        data -- raw error payload string
    """
    version = 4
    type = 1
    err_type = 13

    def __init__(self, xid=None, code=None, data=None):
        # PEP 8: compare against None with `is`/`is not`, not equality.
        if xid is not None:
            self.xid = xid
        else:
            self.xid = None
        if code is not None:
            self.code = code
        else:
            self.code = 0
        if data is not None:
            self.data = data
        else:
            self.data = ''
        return

    def pack(self):
        """Serialize this message and return the wire-format string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.err_type))
        packed.append(struct.pack("!H", self.code))
        packed.append(self.data)
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Deserialize from a loxi reader positioned at the version byte."""
        obj = table_features_failed_error_msg()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 1)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Remaining payload = declared length minus the 4 header bytes read.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _err_type = reader.read("!H")[0]
        assert(_err_type == 13)
        obj.code = reader.read("!H")[0]
        obj.data = str(reader.read_all())
        return obj

    def __eq__(self, other):
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.code != other.code: return False
        if self.data != other.data: return False
        return True

    def pretty_print(self, q):
        """Render a human-readable form of this message via pretty-printer q."""
        q.text("table_features_failed_error_msg {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid is not None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("code = ");
                q.text("%#x" % self.code)
                q.text(","); q.breakable()
                q.text("data = ");
                q.pp(self.data)
            q.breakable()
        q.text('}')

error_msg.subtypes[13] = table_features_failed_error_msg
class table_features_stats_reply(stats_reply):
    """OFPT_MULTIPART_REPLY carrying table features (stats type 12).

    Attributes:
        xid      -- transaction id, or None if unassigned
        flags    -- multipart reply flags (uint16)
        entries  -- list of common.table_features
    """
    version = 4
    type = 19
    stats_type = 12

    def __init__(self, xid=None, flags=None, entries=None):
        # PEP 8: compare against None with `is`/`is not`, not equality.
        if xid is not None:
            self.xid = xid
        else:
            self.xid = None
        if flags is not None:
            self.flags = flags
        else:
            self.flags = 0
        if entries is not None:
            self.entries = entries
        else:
            self.entries = []
        return

    def pack(self):
        """Serialize this message and return the wire-format string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.stats_type))
        packed.append(struct.pack("!H", self.flags))
        packed.append('\x00' * 4)  # pad
        packed.append(loxi.generic_util.pack_list(self.entries))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Deserialize from a loxi reader positioned at the version byte."""
        obj = table_features_stats_reply()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 19)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Remaining payload = declared length minus the 4 header bytes read.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _stats_type = reader.read("!H")[0]
        assert(_stats_type == 12)
        obj.flags = reader.read("!H")[0]
        reader.skip(4)
        obj.entries = loxi.generic_util.unpack_list(reader, common.table_features.unpack)
        return obj

    def __eq__(self, other):
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.flags != other.flags: return False
        if self.entries != other.entries: return False
        return True

    def pretty_print(self, q):
        """Render a human-readable form of this message via pretty-printer q."""
        q.text("table_features_stats_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid is not None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("flags = ");
                q.text("%#x" % self.flags)
                q.text(","); q.breakable()
                q.text("entries = ");
                q.pp(self.entries)
            q.breakable()
        q.text('}')

stats_reply.subtypes[12] = table_features_stats_reply
class table_features_stats_request(stats_request):
    """OFPT_MULTIPART_REQUEST for/with table features (stats type 12).

    Attributes:
        xid      -- transaction id, or None if unassigned
        flags    -- multipart request flags (uint16)
        entries  -- list of common.table_features (may be empty to query)
    """
    version = 4
    type = 18
    stats_type = 12

    def __init__(self, xid=None, flags=None, entries=None):
        # PEP 8: compare against None with `is`/`is not`, not equality.
        if xid is not None:
            self.xid = xid
        else:
            self.xid = None
        if flags is not None:
            self.flags = flags
        else:
            self.flags = 0
        if entries is not None:
            self.entries = entries
        else:
            self.entries = []
        return

    def pack(self):
        """Serialize this message and return the wire-format string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!H", self.stats_type))
        packed.append(struct.pack("!H", self.flags))
        packed.append('\x00' * 4)  # pad
        packed.append(loxi.generic_util.pack_list(self.entries))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Deserialize from a loxi reader positioned at the version byte."""
        obj = table_features_stats_request()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 18)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Remaining payload = declared length minus the 4 header bytes read.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _stats_type = reader.read("!H")[0]
        assert(_stats_type == 12)
        obj.flags = reader.read("!H")[0]
        reader.skip(4)
        obj.entries = loxi.generic_util.unpack_list(reader, common.table_features.unpack)
        return obj

    def __eq__(self, other):
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.flags != other.flags: return False
        if self.entries != other.entries: return False
        return True

    def pretty_print(self, q):
        """Render a human-readable form of this message via pretty-printer q."""
        q.text("table_features_stats_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid is not None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("flags = ");
                q.text("%#x" % self.flags)
                q.text(","); q.breakable()
                q.text("entries = ");
                q.pp(self.entries)
            q.breakable()
        q.text('}')

stats_request.subtypes[12] = table_features_stats_request
class table_mod(message):
    """OFPT_TABLE_MOD (type 17): modify flow-table configuration.

    Attributes:
        xid      -- transaction id, or None if unassigned
        table_id -- table to modify, or OFPTT_ALL (uint8)
        config   -- table config bitmap (uint32)
    """
    version = 4
    type = 17

    def __init__(self, xid=None, table_id=None, config=None):
        # PEP 8: compare against None with `is`/`is not`, not equality.
        if xid is not None:
            self.xid = xid
        else:
            self.xid = None
        if table_id is not None:
            self.table_id = table_id
        else:
            self.table_id = 0
        if config is not None:
            self.config = config
        else:
            self.config = 0
        return

    def pack(self):
        """Serialize this message and return the wire-format string."""
        packed = []
        packed.append(struct.pack("!B", self.version))
        packed.append(struct.pack("!B", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
        packed.append(struct.pack("!L", self.xid))
        packed.append(struct.pack("!B", self.table_id))
        packed.append('\x00' * 3)  # pad
        packed.append(struct.pack("!L", self.config))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack("!H", length)
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Deserialize from a loxi reader positioned at the version byte."""
        obj = table_mod()
        _version = reader.read("!B")[0]
        assert(_version == 4)
        _type = reader.read("!B")[0]
        assert(_type == 17)
        _length = reader.read("!H")[0]
        orig_reader = reader
        # Remaining payload = declared length minus the 4 header bytes read.
        reader = orig_reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        obj.table_id = reader.read("!B")[0]
        reader.skip(3)
        obj.config = reader.read("!L")[0]
        return obj

    def __eq__(self, other):
        if type(self) != type(other): return False
        if self.xid != other.xid: return False
        if self.table_id != other.table_id: return False
        if self.config != other.config: return False
        return True

    def pretty_print(self, q):
        """Render a human-readable form of this message via pretty-printer q."""
        q.text("table_mod {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ");
                if self.xid is not None:
                    q.text("%#x" % self.xid)
                else:
                    q.text('None')
                q.text(","); q.breakable()
                q.text("table_id = ");
                q.text("%#x" % self.table_id)
                q.text(","); q.breakable()
                q.text("config = ");
                q.text("%#x" % self.config)
            q.breakable()
        q.text('}')

message.subtypes[17] = table_mod
class table_mod_failed_error_msg(error_msg):
    """Table-mod-failed error message (wire version 4, err_type 8)."""
    version = 4
    type = 1
    err_type = 8

    def __init__(self, xid=None, code=None, data=None):
        # xid has no meaningful default; code/data fall back to 0 / empty payload.
        self.xid = xid
        self.code = code if code != None else 0
        self.data = data if data != None else ''

    def pack(self):
        """Serialize to wire format; the 16-bit length at offset 2 is back-patched."""
        parts = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder, patched below
            struct.pack("!L", self.xid),
            struct.pack("!H", self.err_type),
            struct.pack("!H", self.code),
            self.data,
        ]
        total = sum(len(p) for p in parts)
        parts[2] = struct.pack("!H", total)
        return ''.join(parts)

    @staticmethod
    def unpack(reader):
        """Decode a table_mod_failed_error_msg from reader; asserts the fixed fields."""
        obj = table_mod_failed_error_msg()
        _version = reader.read("!B")[0]
        assert _version == 4
        _type = reader.read("!B")[0]
        assert _type == 1
        _length = reader.read("!H")[0]
        # Constrain further reads to this message's own bytes (length covers
        # the two bytes already consumed for version/type plus the length field).
        reader = reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _err_type = reader.read("!H")[0]
        assert _err_type == 8
        obj.code = reader.read("!H")[0]
        obj.data = str(reader.read_all())
        return obj

    def __eq__(self, other):
        """Equality over concrete type, xid, code and data payload."""
        if type(self) is not type(other):
            return False
        return (self.xid == other.xid
                and self.code == other.code
                and self.data == other.data)

    def pretty_print(self, q):
        """Emit a human-readable rendering through pretty-printer q."""
        q.text("table_mod_failed_error_msg {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid is not None else 'None')
                q.text(",")
                q.breakable()
                q.text("code = ")
                q.text("%#x" % self.code)
                q.text(",")
                q.breakable()
                q.text("data = ")
                q.pp(self.data)
                q.breakable()
        q.text('}')
# Register this class as the parser for error messages with err_type 8.
error_msg.subtypes[8] = table_mod_failed_error_msg
class table_stats_reply(stats_reply):
    """Table statistics reply (wire version 4, stats_type 3)."""
    version = 4
    type = 19
    stats_type = 3

    def __init__(self, xid=None, flags=None, entries=None):
        # xid has no meaningful default; flags/entries fall back to 0 / empty list.
        self.xid = xid
        self.flags = flags if flags != None else 0
        self.entries = entries if entries != None else []

    def pack(self):
        """Serialize to wire format; the 16-bit length at offset 2 is back-patched."""
        parts = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder, patched below
            struct.pack("!L", self.xid),
            struct.pack("!H", self.stats_type),
            struct.pack("!H", self.flags),
            '\x00' * 4,  # pad
            loxi.generic_util.pack_list(self.entries),
        ]
        total = sum(len(p) for p in parts)
        parts[2] = struct.pack("!H", total)
        return ''.join(parts)

    @staticmethod
    def unpack(reader):
        """Decode a table_stats_reply from reader; asserts the fixed fields."""
        obj = table_stats_reply()
        _version = reader.read("!B")[0]
        assert _version == 4
        _type = reader.read("!B")[0]
        assert _type == 19
        _length = reader.read("!H")[0]
        # Constrain further reads to this message's own bytes.
        reader = reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _stats_type = reader.read("!H")[0]
        assert _stats_type == 3
        obj.flags = reader.read("!H")[0]
        reader.skip(4)  # pad
        obj.entries = loxi.generic_util.unpack_list(reader, common.table_stats_entry.unpack)
        return obj

    def __eq__(self, other):
        """Equality over concrete type, xid, flags and the entries list."""
        if type(self) is not type(other):
            return False
        return (self.xid == other.xid
                and self.flags == other.flags
                and self.entries == other.entries)

    def pretty_print(self, q):
        """Emit a human-readable rendering through pretty-printer q."""
        q.text("table_stats_reply {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid is not None else 'None')
                q.text(",")
                q.breakable()
                q.text("flags = ")
                q.text("%#x" % self.flags)
                q.text(",")
                q.breakable()
                q.text("entries = ")
                q.pp(self.entries)
                q.breakable()
        q.text('}')
# Register this class as the parser for stats replies with stats_type 3.
stats_reply.subtypes[3] = table_stats_reply
class table_stats_request(stats_request):
    """Table statistics request (wire version 4, stats_type 3)."""
    version = 4
    type = 18
    stats_type = 3

    def __init__(self, xid=None, flags=None):
        # xid has no meaningful default; flags falls back to 0.
        self.xid = xid
        self.flags = flags if flags != None else 0

    def pack(self):
        """Serialize to wire format; the 16-bit length at offset 2 is back-patched."""
        parts = [
            struct.pack("!B", self.version),
            struct.pack("!B", self.type),
            struct.pack("!H", 0),  # length placeholder, patched below
            struct.pack("!L", self.xid),
            struct.pack("!H", self.stats_type),
            struct.pack("!H", self.flags),
            '\x00' * 4,  # pad
        ]
        total = sum(len(p) for p in parts)
        parts[2] = struct.pack("!H", total)
        return ''.join(parts)

    @staticmethod
    def unpack(reader):
        """Decode a table_stats_request from reader; asserts the fixed fields."""
        obj = table_stats_request()
        _version = reader.read("!B")[0]
        assert _version == 4
        _type = reader.read("!B")[0]
        assert _type == 18
        _length = reader.read("!H")[0]
        # Constrain further reads to this message's own bytes.
        reader = reader.slice(_length - (2 + 2))
        obj.xid = reader.read("!L")[0]
        _stats_type = reader.read("!H")[0]
        assert _stats_type == 3
        obj.flags = reader.read("!H")[0]
        reader.skip(4)  # pad
        return obj

    def __eq__(self, other):
        """Equality over concrete type, xid and flags."""
        if type(self) is not type(other):
            return False
        return self.xid == other.xid and self.flags == other.flags

    def pretty_print(self, q):
        """Emit a human-readable rendering through pretty-printer q."""
        q.text("table_stats_request {")
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text("xid = ")
                q.text("%#x" % self.xid if self.xid is not None else 'None')
                q.text(",")
                q.breakable()
                q.text("flags = ")
                q.text("%#x" % self.flags)
                q.breakable()
        q.text('}')
# Register this class as the parser for stats requests with stats_type 3.
stats_request.subtypes[3] = table_stats_request
def parse_header(buf):
    """Decode the fixed 8-byte OpenFlow header from buf.

    Returns the tuple (version, type, length, xid); raises
    loxi.ProtocolError when buf is shorter than a header.
    """
    if len(buf) >= 8:
        return struct.unpack_from("!BBHL", buf)
    raise loxi.ProtocolError("too short to be an OpenFlow message")
def parse_message(buf):
    """Parse one complete OpenFlow message from buf and return the decoded object.

    Raises loxi.ProtocolError when the version is wrong (HELLO messages are
    exempt, since version negotiation happens there) or when the buffer size
    disagrees with the header's length field.
    """
    msg_ver, msg_type, msg_len, msg_xid = parse_header(buf)
    version_ok = msg_ver == const.OFP_VERSION or msg_type == const.OFPT_HELLO
    if not version_ok:
        raise loxi.ProtocolError("wrong OpenFlow version (expected %d, got %d)" % (const.OFP_VERSION, msg_ver))
    if len(buf) != msg_len:
        raise loxi.ProtocolError("incorrect message size")
    return message.unpack(loxi.generic_util.OFReader(buf))
| 33.847846
| 298
| 0.524108
| 57,749
| 472,279
| 4.134617
| 0.006009
| 0.040457
| 0.087825
| 0.107342
| 0.956196
| 0.941806
| 0.933266
| 0.920472
| 0.912724
| 0.90957
| 0
| 0.018316
| 0.332522
| 472,279
| 13,952
| 299
| 33.850272
| 0.739116
| 0.01226
| 0
| 0.894605
| 0
| 0
| 0.037607
| 0.005994
| 0
| 0
| 0
| 0
| 0.041846
| 1
| 0.061832
| false
| 0
| 0.001015
| 0
| 0.168553
| 0.012335
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0abda42b690c606229130ada00fdc16a0c5165c8
| 1,727
|
py
|
Python
|
tracker/person_/pre_img/preprocess.py
|
msadegh97/tracker-person
|
0e2f8378546981b280be42853bb6d2ed9cbc9137
|
[
"MIT"
] | null | null | null |
tracker/person_/pre_img/preprocess.py
|
msadegh97/tracker-person
|
0e2f8378546981b280be42853bb6d2ed9cbc9137
|
[
"MIT"
] | null | null | null |
tracker/person_/pre_img/preprocess.py
|
msadegh97/tracker-person
|
0e2f8378546981b280be42853bb6d2ed9cbc9137
|
[
"MIT"
] | null | null | null |
import PIL
from PIL import Image
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import os
# Crop every annotated bounding box from each training image and save it as a
# 120x360 positive sample named "<image>_<k>_1.jpg".
for i in os.listdir('/home/msadegh/workspace/embedded_project/tracker/person_/precarious_dataset/trainAno/'):
    # First line of each annotation file is a header; skip it. Close the file
    # promptly instead of leaking the handle.
    with open('/home/msadegh/workspace/embedded_project/tracker/person_/precarious_dataset/trainAno/' + i) as ano_file:
        annotation = ano_file.readlines()[1:]
    # Open the source image once per annotation file, not once per box.
    image = Image.open('/home/msadegh/workspace/embedded_project/tracker/person_/precarious_dataset/train/' + i[0:-4] + '.jpg')
    # BUG FIX: g was reset to 0 inside the per-box loop, so every crop of an
    # image was saved under the same "_0_1.jpg" name, overwriting the previous
    # one. The counter must persist across boxes of the same image.
    g = 0
    for j in annotation:
        annot = j.split()
        # annot[1:5] look like x, y, width, height of the box — TODO confirm
        # against the annotation format.
        new_image = image.crop((int(annot[1]), int(annot[2]), int(annot[1]) + int(annot[3]), int(annot[2]) + int(annot[4])))
        new_image = new_image.resize((120, 360))
        new_image.save('/home/msadegh/workspace/embedded_project/tracker/person_/person_dataset/' +
                       i[0:-4] + '_' + str(g) + '_1.jpg')
        g += 1
# Same pass again, but with the crop window shifted 200 px to the right —
# presumably to sample background regions as negatives ("..._0.jpg"); verify
# that intent with the dataset owner.
for i in os.listdir('/home/msadegh/workspace/embedded_project/tracker/person_/precarious_dataset/trainAno/'):
    # First line of each annotation file is a header; skip it.
    with open('/home/msadegh/workspace/embedded_project/tracker/person_/precarious_dataset/trainAno/' + i) as ano_file:
        annotation = ano_file.readlines()[1:]
    # Open the source image once per annotation file, not once per box.
    image = Image.open('/home/msadegh/workspace/embedded_project/tracker/person_/precarious_dataset/train/' + i[0:-4] + '.jpg')
    # BUG FIX: g was reset to 0 inside the per-box loop, so every crop of an
    # image overwrote the previous one; the counter must persist across boxes.
    g = 0
    for j in annotation:
        annot = j.split()
        new_image = image.crop((int(annot[1]) + 200, int(annot[2]), int(annot[1]) + int(annot[3]) + 200, int(annot[2]) + int(annot[4])))
        new_image = new_image.resize((120, 360))
        new_image.save('/home/msadegh/workspace/embedded_project/tracker/person_/not_person/' +
                       i[0:-4] + '_' + str(g) + '_0.jpg')
        g += 1
| 47.972222
| 130
| 0.666474
| 239
| 1,727
| 4.665272
| 0.213389
| 0.086099
| 0.143498
| 0.200897
| 0.886099
| 0.873543
| 0.868161
| 0.868161
| 0.868161
| 0.819731
| 0
| 0.032123
| 0.170816
| 1,727
| 35
| 131
| 49.342857
| 0.746508
| 0
| 0
| 0.6
| 0
| 0
| 0.38564
| 0.372901
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e40867f58dd3e0859c485ca342b2eca0965b0bfd
| 33,562
|
py
|
Python
|
wavefront_api_client/api/saved_traces_search_group_api.py
|
httpsgithu/python-client
|
f85a530367cdabe458a11919ad35609b9bc0606b
|
[
"Apache-2.0"
] | null | null | null |
wavefront_api_client/api/saved_traces_search_group_api.py
|
httpsgithu/python-client
|
f85a530367cdabe458a11919ad35609b9bc0606b
|
[
"Apache-2.0"
] | null | null | null |
wavefront_api_client/api/saved_traces_search_group_api.py
|
httpsgithu/python-client
|
f85a530367cdabe458a11919ad35609b9bc0606b
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Wavefront REST API Documentation
<p>The Wavefront REST API enables you to interact with Wavefront servers using standard REST API tools. You can use the REST API to automate commonly executed operations such as automatically tagging sources.</p><p>When you make REST API calls outside the Wavefront REST API documentation you must add the header \"Authorization: Bearer <<API-TOKEN>>\" to your HTTP requests.</p> # noqa: E501
OpenAPI spec version: v2
Contact: chitimba@wavefront.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from wavefront_api_client.api_client import ApiClient
class SavedTracesSearchGroupApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """Bind this API to an ApiClient; a default client is created when none is given."""
    self.api_client = api_client if api_client is not None else ApiClient()
def add_saved_traces_search_to_group(self, id, search_id, **kwargs):  # noqa: E501
    """Add a search to a search group.  # noqa: E501

    Synchronous by default; pass async_req=True to get the request thread
    back instead of the decoded result.

    :param async_req bool
    :param str id: (required)
    :param str search_id: (required)
    :return: ResponseContainer
    """
    # The generated sync/async branches both returned this call's result
    # unchanged, so a single return covers both modes.
    kwargs['_return_http_data_only'] = True
    return self.add_saved_traces_search_to_group_with_http_info(id, search_id, **kwargs)  # noqa: E501
def add_saved_traces_search_to_group_with_http_info(self, id, search_id, **kwargs):  # noqa: E501
    """Add a search to a search group  # noqa: E501

    # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.add_saved_traces_search_to_group_with_http_info(id, search_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: (required)
    :param str search_id: (required)
    :return: ResponseContainer
             If the method is called asynchronously,
             returns the request thread.
    """
    # Whitelist of caller-visible kwargs plus the internal transport options.
    all_params = ['id', 'search_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE(review): locals() snapshots exactly {self, id, search_id, kwargs,
    # all_params}; the membership checks below rely on that set, so no new
    # locals may be introduced above this line.
    params = locals()
    # Reject any keyword argument this endpoint does not understand.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_saved_traces_search_to_group" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if self.api_client.client_side_validation and ('id' not in params or
                                                   params['id'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `id` when calling `add_saved_traces_search_to_group`")  # noqa: E501
    # verify the required parameter 'search_id' is set
    if self.api_client.client_side_validation and ('search_id' not in params or
                                                   params['search_id'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `search_id` when calling `add_saved_traces_search_to_group`")  # noqa: E501

    collection_formats = {}

    # Both ids are substituted into the URL path template below.
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501
    if 'search_id' in params:
        path_params['searchId'] = params['search_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['api_key']  # noqa: E501

    return self.api_client.call_api(
        '/api/v2/savedtracessearchgroup/{id}/addSearch/{searchId}', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ResponseContainer',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def create_saved_traces_search_group(self, **kwargs):  # noqa: E501
    """Create a search group.  # noqa: E501

    Synchronous by default; pass async_req=True to get the request thread
    back instead of the decoded result.

    :param async_req bool
    :param SavedTracesSearchGroup body: Example Body: <pre>{ \"name\": \"Search Group 1\" }</pre>
    :return: ResponseContainerSavedTracesSearchGroup
    """
    # The generated sync/async branches both returned this call's result
    # unchanged, so a single return covers both modes.
    kwargs['_return_http_data_only'] = True
    return self.create_saved_traces_search_group_with_http_info(**kwargs)  # noqa: E501
def create_saved_traces_search_group_with_http_info(self, **kwargs):  # noqa: E501
    """Create a search group  # noqa: E501

    # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_saved_traces_search_group_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param SavedTracesSearchGroup body: Example Body: <pre>{ \"name\": \"Search Group 1\" }</pre>
    :return: ResponseContainerSavedTracesSearchGroup
             If the method is called asynchronously,
             returns the request thread.
    """
    # Whitelist of caller-visible kwargs plus the internal transport options.
    all_params = ['body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE(review): locals() snapshots exactly {self, kwargs, all_params};
    # the membership checks below rely on that set, so no new locals may be
    # introduced above this line.
    params = locals()
    # Reject any keyword argument this endpoint does not understand.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_saved_traces_search_group" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # Request body is optional; send it only when the caller provided one.
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['api_key']  # noqa: E501

    return self.api_client.call_api(
        '/api/v2/savedtracessearchgroup', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ResponseContainerSavedTracesSearchGroup',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def delete_saved_traces_search_group(self, id, **kwargs):  # noqa: E501
    """Delete a search group.  # noqa: E501

    Synchronous by default; pass async_req=True to get the request thread
    back instead of the decoded result.

    :param async_req bool
    :param str id: (required)
    :return: ResponseContainerSavedTracesSearchGroup
    """
    # The generated sync/async branches both returned this call's result
    # unchanged, so a single return covers both modes.
    kwargs['_return_http_data_only'] = True
    return self.delete_saved_traces_search_group_with_http_info(id, **kwargs)  # noqa: E501
def delete_saved_traces_search_group_with_http_info(self, id, **kwargs):  # noqa: E501
    """Delete a search group  # noqa: E501

    # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_saved_traces_search_group_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: (required)
    :return: ResponseContainerSavedTracesSearchGroup
             If the method is called asynchronously,
             returns the request thread.
    """
    # Whitelist of caller-visible kwargs plus the internal transport options.
    all_params = ['id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE(review): locals() snapshots exactly {self, id, kwargs, all_params};
    # the membership checks below rely on that set, so no new locals may be
    # introduced above this line.
    params = locals()
    # Reject any keyword argument this endpoint does not understand.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_saved_traces_search_group" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if self.api_client.client_side_validation and ('id' not in params or
                                                   params['id'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `id` when calling `delete_saved_traces_search_group`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['api_key']  # noqa: E501

    return self.api_client.call_api(
        '/api/v2/savedtracessearchgroup/{id}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ResponseContainerSavedTracesSearchGroup',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_all_saved_traces_search_group(self, **kwargs):  # noqa: E501
    """Get all search groups for a user.  # noqa: E501

    Synchronous by default; pass async_req=True to get the request thread
    back instead of the decoded result.

    :param async_req bool
    :param int offset:
    :param int limit:
    :return: ResponseContainerPagedSavedTracesSearchGroup
    """
    # The generated sync/async branches both returned this call's result
    # unchanged, so a single return covers both modes.
    kwargs['_return_http_data_only'] = True
    return self.get_all_saved_traces_search_group_with_http_info(**kwargs)  # noqa: E501
def get_all_saved_traces_search_group_with_http_info(self, **kwargs):  # noqa: E501
    """Get all search groups for a user  # noqa: E501

    # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_all_saved_traces_search_group_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int offset:
    :param int limit:
    :return: ResponseContainerPagedSavedTracesSearchGroup
             If the method is called asynchronously,
             returns the request thread.
    """
    # Whitelist of caller-visible kwargs plus the internal transport options.
    all_params = ['offset', 'limit']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE(review): locals() snapshots exactly {self, kwargs, all_params};
    # the membership checks below rely on that set, so no new locals may be
    # introduced above this line.
    params = locals()
    # Reject any keyword argument this endpoint does not understand.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_all_saved_traces_search_group" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    path_params = {}

    # Pagination parameters are optional and passed through as query string.
    query_params = []
    if 'offset' in params:
        query_params.append(('offset', params['offset']))  # noqa: E501
    if 'limit' in params:
        query_params.append(('limit', params['limit']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['api_key']  # noqa: E501

    return self.api_client.call_api(
        '/api/v2/savedtracessearchgroup', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ResponseContainerPagedSavedTracesSearchGroup',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_saved_traces_search_group(self, id, **kwargs):  # noqa: E501
    """Get a specific search group.  # noqa: E501

    Synchronous by default; pass async_req=True to get the request thread
    back instead of the decoded result.

    :param async_req bool
    :param str id: (required)
    :return: ResponseContainerSavedTracesSearchGroup
    """
    # The generated sync/async branches both returned this call's result
    # unchanged, so a single return covers both modes.
    kwargs['_return_http_data_only'] = True
    return self.get_saved_traces_search_group_with_http_info(id, **kwargs)  # noqa: E501
def get_saved_traces_search_group_with_http_info(self, id, **kwargs):  # noqa: E501
    """Get a specific search group  # noqa: E501

    # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_saved_traces_search_group_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: (required)
    :return: ResponseContainerSavedTracesSearchGroup
             If the method is called asynchronously,
             returns the request thread.
    """
    # Whitelist of caller-visible kwargs plus the internal transport options.
    all_params = ['id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE(review): locals() snapshots exactly {self, id, kwargs, all_params};
    # the membership checks below rely on that set, so no new locals may be
    # introduced above this line.
    params = locals()
    # Reject any keyword argument this endpoint does not understand.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_saved_traces_search_group" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if self.api_client.client_side_validation and ('id' not in params or
                                                   params['id'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `id` when calling `get_saved_traces_search_group`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['api_key']  # noqa: E501

    return self.api_client.call_api(
        '/api/v2/savedtracessearchgroup/{id}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ResponseContainerSavedTracesSearchGroup',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_saved_traces_searches_for_group(self, id, **kwargs):  # noqa: E501
    """Get all searches for a search group.  # noqa: E501

    Synchronous by default; pass async_req=True to get the request thread
    back instead of the decoded result.

    :param async_req bool
    :param str id: (required)
    :param int offset:
    :param int limit:
    :return: ResponseContainerPagedSavedTracesSearch
    """
    # The generated sync/async branches both returned this call's result
    # unchanged, so a single return covers both modes.
    kwargs['_return_http_data_only'] = True
    return self.get_saved_traces_searches_for_group_with_http_info(id, **kwargs)  # noqa: E501
def get_saved_traces_searches_for_group_with_http_info(self, id, **kwargs):  # noqa: E501
    """Get all searches for a search group  # noqa: E501

    # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_saved_traces_searches_for_group_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: (required)
    :param int offset:
    :param int limit:
    :return: ResponseContainerPagedSavedTracesSearch
             If the method is called asynchronously,
             returns the request thread.
    """
    # Whitelist of caller-visible kwargs plus the internal transport options.
    all_params = ['id', 'offset', 'limit']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE(review): locals() snapshots exactly {self, id, kwargs, all_params};
    # the membership checks below rely on that set, so no new locals may be
    # introduced above this line.
    params = locals()
    # Reject any keyword argument this endpoint does not understand.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_saved_traces_searches_for_group" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if self.api_client.client_side_validation and ('id' not in params or
                                                   params['id'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `id` when calling `get_saved_traces_searches_for_group`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501

    # Pagination parameters are optional and passed through as query string.
    query_params = []
    if 'offset' in params:
        query_params.append(('offset', params['offset']))  # noqa: E501
    if 'limit' in params:
        query_params.append(('limit', params['limit']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['api_key']  # noqa: E501

    return self.api_client.call_api(
        '/api/v2/savedtracessearchgroup/{id}/searches', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ResponseContainerPagedSavedTracesSearch',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def remove_saved_traces_search_from_group(self, id, search_id, **kwargs):  # noqa: E501
    """Remove a search from a search group.  # noqa: E501

    Synchronous by default; pass async_req=True to get the request thread
    back instead of the decoded result.

    :param async_req bool
    :param str id: (required)
    :param str search_id: (required)
    :return: ResponseContainer
    """
    # The generated sync/async branches both returned this call's result
    # unchanged, so a single return covers both modes.
    kwargs['_return_http_data_only'] = True
    return self.remove_saved_traces_search_from_group_with_http_info(id, search_id, **kwargs)  # noqa: E501
def remove_saved_traces_search_from_group_with_http_info(self, id, search_id, **kwargs):  # noqa: E501
    """Remove a search from a search group  # noqa: E501

    # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.remove_saved_traces_search_from_group_with_http_info(id, search_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: (required)
    :param str search_id: (required)
    :return: ResponseContainer
             If the method is called asynchronously,
             returns the request thread.
    """
    # Whitelist of caller-visible kwargs plus the internal transport options.
    all_params = ['id', 'search_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE(review): locals() snapshots exactly {self, id, search_id, kwargs,
    # all_params}; the membership checks below rely on that set, so no new
    # locals may be introduced above this line.
    params = locals()
    # Reject any keyword argument this endpoint does not understand.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method remove_saved_traces_search_from_group" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if self.api_client.client_side_validation and ('id' not in params or
                                                   params['id'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `id` when calling `remove_saved_traces_search_from_group`")  # noqa: E501
    # verify the required parameter 'search_id' is set
    if self.api_client.client_side_validation and ('search_id' not in params or
                                                   params['search_id'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `search_id` when calling `remove_saved_traces_search_from_group`")  # noqa: E501

    collection_formats = {}

    # Both ids are substituted into the URL path template below.
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501
    if 'search_id' in params:
        path_params['searchId'] = params['search_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['api_key']  # noqa: E501

    return self.api_client.call_api(
        '/api/v2/savedtracessearchgroup/{id}/removeSearch/{searchId}', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ResponseContainer',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def update_saved_traces_search_group(self, id, **kwargs):  # noqa: E501
    """Update a search group.  # noqa: E501

    Synchronous by default; pass async_req=True to get the request thread
    back instead of the decoded result.

    :param async_req bool
    :param str id: (required)
    :param SavedTracesSearchGroup body: Example Body: <pre>{ \"name\": \"Search Group 1\" }</pre>
    :return: ResponseContainerSavedTracesSearchGroup
    """
    # The generated sync/async branches both returned this call's result
    # unchanged, so a single return covers both modes.
    kwargs['_return_http_data_only'] = True
    return self.update_saved_traces_search_group_with_http_info(id, **kwargs)  # noqa: E501
def update_saved_traces_search_group_with_http_info(self, id, **kwargs):  # noqa: E501
    """Update a search group  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.update_saved_traces_search_group_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: (required)
    :param SavedTracesSearchGroup body: Example Body: <pre>{ \"name\": \"Search Group 1\" }</pre>
    :return: ResponseContainerSavedTracesSearchGroup
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unexpected keyword argument is supplied
    :raises ValueError: if client-side validation is on and `id` is None
    """
    # Keyword arguments accepted by this endpoint, plus the common
    # request-control flags understood by ApiClient.call_api.
    all_params = {'id', 'body', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout'}  # noqa: E501

    # Collect parameters explicitly instead of via locals() +
    # six.iteritems(); behavior is identical on Python 2 and 3, and the
    # params dict no longer accidentally contains `self`/`all_params`.
    params = {'id': id}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_saved_traces_search_group" % key
            )
        params[key] = val

    # verify the required parameter 'id' is set (it is always present in
    # params since it is a positional argument; only None must be rejected)
    if self.api_client.client_side_validation and params['id'] is None:  # noqa: E501
        raise ValueError("Missing the required parameter `id` when calling `update_saved_traces_search_group`")  # noqa: E501

    collection_formats = {}

    path_params = {'id': params['id']}  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # Optional request body (SavedTracesSearchGroup); None when omitted.
    body_params = params.get('body')

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['api_key']  # noqa: E501

    return self.api_client.call_api(
        '/api/v2/savedtracessearchgroup/{id}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ResponseContainerSavedTracesSearchGroup',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
| 40.387485
| 409
| 0.61075
| 3,835
| 33,562
| 5.066232
| 0.052934
| 0.05394
| 0.048999
| 0.043028
| 0.954604
| 0.95306
| 0.948325
| 0.940759
| 0.937053
| 0.930825
| 0
| 0.017532
| 0.299803
| 33,562
| 830
| 410
| 40.436145
| 0.809234
| 0.317383
| 0
| 0.82167
| 0
| 0
| 0.186325
| 0.077418
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038375
| false
| 0
| 0.009029
| 0
| 0.103837
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7c5495ab735c72fca7c48e25af60c2ac749f364a
| 286
|
py
|
Python
|
amazon_products/__init__.py
|
joshloyal/pydata-amazon-products
|
fdfe4d0cd49b12fa5a74b05f5d862bb13e7bd72d
|
[
"MIT"
] | 34
|
2017-04-03T03:52:34.000Z
|
2020-12-27T18:48:56.000Z
|
amazon_products/__init__.py
|
joshloyal/pydata-amazon-products
|
fdfe4d0cd49b12fa5a74b05f5d862bb13e7bd72d
|
[
"MIT"
] | 1
|
2017-08-18T07:05:54.000Z
|
2017-08-18T07:05:54.000Z
|
amazon_products/__init__.py
|
joshloyal/pydata-amazon-products
|
fdfe4d0cd49b12fa5a74b05f5d862bb13e7bd72d
|
[
"MIT"
] | 8
|
2017-04-04T15:24:37.000Z
|
2019-05-20T08:56:53.000Z
|
from amazon_products import image_utils
from amazon_products import image_features
from amazon_products import resnet
from amazon_products import text_generators
from amazon_products import image_generators
from amazon_products import text_plots
from amazon_products import bokeh_plots
| 35.75
| 44
| 0.902098
| 41
| 286
| 5.97561
| 0.292683
| 0.285714
| 0.514286
| 0.685714
| 0.665306
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097902
| 286
| 7
| 45
| 40.857143
| 0.949612
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
7c8efc966b970c85604f3ea3225d710e17fa0d94
| 50,172
|
py
|
Python
|
apps/DBspeedTest/_mds_id.py
|
jeonghoonkang/BerePi
|
e04283a94a6a0487ab0049dc3e514d6c5dda39cc
|
[
"BSD-2-Clause"
] | 22
|
2015-06-03T06:28:27.000Z
|
2022-03-18T08:02:45.000Z
|
apps/DBspeedTest/_mds_id.py
|
jeonghoonkang/BerePi
|
e04283a94a6a0487ab0049dc3e514d6c5dda39cc
|
[
"BSD-2-Clause"
] | 14
|
2015-06-08T01:31:53.000Z
|
2020-08-30T02:19:15.000Z
|
apps/DBspeedTest/_mds_id.py
|
jeonghoonkang/BerePi
|
e04283a94a6a0487ab0049dc3e514d6c5dda39cc
|
[
"BSD-2-Clause"
] | 26
|
2015-05-12T09:33:55.000Z
|
2021-08-30T05:41:00.000Z
|
mds_id_list = ['00-250068136', '00-250071518', '00-250102669', '00-250124842', '00-250071520', '00-250068182', '00-250071525', '06-25-0068185', '00-250071427', '01223192096-0001', '00-360000221', '00-450083432', '00-360000201', '01223187332-0001', '00-250068169', '00-250068178', '00-250059834', '00-250070991', '00-250071407', '00-250071409', '00-250071428', '00-360000247', '06-25-0071364', '06-25-0071217', '00-250071548', '00-250059976', '06-25-0068176', '00-250071459', '00-250059912', '00-250059913', '00-360000037', '00-360000154', '00-250130308', '00-360000241', '00-360000020', '00-360000283', '00-360000244', '00-360000238', '06-36-0000175', '06-36-0000194', '06-36-0000152', '06-36-0000156', '00-360000233', '00-360000236', '00-360000025', '00-360000165', '00-250130333', '00-250130341', '06-25-0071086', '00-250059909', '00-360000173', '06-25-0071072', '06-25-0071167', '06-25-0071026', '06-25-0071032', '00-360000237', '00-360000250', '00-450083448', '00-450083403', '00-450083446', '00-450083404', '00-450083445', '00-450083458', '00-250071700', '00-250071667', '00-360000077', '00-450083468', '00-250102673', '00-450083471', '00-360000092', '00-360000031', '06-36-0000228', '06-36-0000225', '00-360000032', '00-360000007', '00-360000229', '06-25-0071104', '06-25-0071098', '06-25-0071109', '06-25-0071166', '00-360000086', '06-36-0000218', '06-36-0000191', '06-36-0000196', '00-250071659', '06-36-0000107', '06-36-0000093', '06-36-0000132', '00-450083524', '00-450083462', '06-25-0068164', '06-36-0000182', '06-36-0000231', '00-250068171', '00-360000005', '00-250059706', '00-250068207', '00-250068193', '00-360000234', '00-450083464', '00-250059925', '00-360000264', '00-460000184', '00-250070993', '00-360000090', '06-36-0000202', '06-36-0000205', '00-250059914', '00-250130372', '00-250130373', '00-250060002', '00-250071550', '00-250071481', '00-250070992', '00-360000224', '01223187357-0001', '00-250068225', '06-25-0071186', '06-25-0068144', '00-450083559', '00-450086595', 
'00-450086518', '00-450086594', '00-451000364', '00-450086520', '00-451000347', '00-451000312', '00-451000263', '00-450086593', '00-450086534', '00-450086521', '00-450083489', '00-450083492', '00-450083431', '00-450086519', '00-450086529', '00-450086505', '00-450083472', '00-250059812', '06-36-0000157', '06-36-0000159', '06-36-0000172', '00-360000245', '06-36-0000199', '06-36-0000187', '06-36-0000155', '06-36-0000160', '00-360000281', '00-360000258', '06-36-0000222', '06-36-0000183', '00-450083490', '06-36-0000217', '06-36-0000197', '00-25007091', '00-25007234', '00-450086510', '00-451000476', '00-451000379', '00-250071408', '06-36-0000193', '06-36-0000181', '00-451000369', '00-450083491', '00-451000293', '00-451000483', '00-450083430', '00-451000262', '00-451000485', '06-36-0000166', '06-36-0000153', '06-36-0000170', '00-451000266', '00-451000392', '00-450086602', '06-36-0000004', '06-36-0000127', '00-250071678', '00-250059887', '00-250071582', '00-250059873', '00-250071681', '00-460003531', '06-25-0071057', '06-25-0071012', '00-250060022', '06-25-0068215', '06-25-0068202', '00-250071417', '00-360000208', '00-250071624', '00-250060000', '00-360000212', '00-450083463', '00-250068228', '00-250059999', '00-250059956', '06-25-0068198', '06-25-0068139', '06-25-0068153', '00-360000242', '00-360000243', '00-250059997', '06-25-0068218', '00-250071559', '00-250130312', '00-250130313', '00-360000262', '00-250059905', '00-250059906', '00-250071467', '00-251004421', '00-250059754', '00-250071694', '00-360000260', '00-360000259', '01221321192-0101', '01221321192-0102', '00-50151143003436', '00-250059813', '06-25-0068192', '00-360000213', '00-250059849', '00-250059998', '06-25-0071059', '06-25-0071165', '06-36-0000184', '06-36-0000189', '00-250130375', '06-25-0068212', '06-25-0068201', '00-250059765', '00-250059836', '00-250071647', '00-360000003', '00-250059803', '00-250059705', '00-450086512', '00-450086604', '00-450083467', '00-450083466', '00-450083465', '00-250059861', 
'00-450083429', '00-250130335', '06-25-0071031', '00-250130363', '00-250130362', '00-250130367', '00-250130361', '00-250130365', '00-450083469', '00-250102710', '00-250130378', '00-250130384', '00-360000282', '00-360000235', '06-25-0068146', '06-25-0068148', '06-25-0071097', '06-25-0068145', '06-36-0000177', '00-250059874', '00-250068217', '00-250068208', '00-250102696', '00-450086503', '00-450086517', '00-450086528', '00-450086506', '00-250068163', '00-250059778', '00-250059777', '06-25-0071055', '06-25-0071089', '00-250060001', '00-250130385', '00-250071424', '00-250059718', '00-250059766', '00-250059920', '00-250068174', '00-250130351', '00-250059871', '00-250059708', '00-250059960', '00-251003141', '00-251004437', '00-250059709', '00-250052077', '00-250102776', '06-25-0071373', '00-250071403', '00-250071434', '00-250068229', '00-250068230', '06-25-0071308', '06-25-0071330', '06-25-0071218', '06-25-0071356', '26-25-1004742', '06-25-0071357', '00-250071405', '00-250059783', '00-250059720', '06-25-0068226', '06-25-0068220', '00-250059949', '00-250059921', '00-250068162', '00-250068179', '00-250068172', '00-250068173', '00-250068158', '00-250059751', '00-250068160', '00-250059992', '00-250059855', '00-250071612', '00-250068071', '00-250070994', '00-250059719', '06-25-0059916', '06-25-0059915', '00-250068166', '00-250071495', '00-250059968', '00-250071692', '00-250130325', '00-250130324', '00-250071540', '00-250071531', '00-250071493', '00-250071557', '00-250071527', '00-250071521', '00-250071558', '00-250059819', '00-250059957', '00-250102801', '00-250102796', '00-250102794', '00-250102797', '00-250130380', '00-360000006', '06-25-0068219', '06-25-0068137', '00-250130381', '00-450083564', '00-450083563', '00-450083561', '00-450083562', '00-250071640', '00-250071632', '00-250059774', '00-250007125', '00-250059860', '00-250059858', '00-250130307', '00-250059798', '00-250102798', '00-250102800', '00-250102795', '00-250071438', '00-250071507', '00-250071504', 
'00-360000252', '00-250071440', '00-250071449', '00-250071453', '00-451000232', '00-250130309', '00-360000001', '00-250059996', '00-250071468', '00-250070999', '06-25-0071088', '06-25-0071029', '06-25-0071168', '06-25-0071099', '06-25-0071369', '00-360000014', '00-360000012', '00-250007109', '00-250102648', '06-25-0071087', '06-25-0071025', '00-250059818', '00-250130318', '00-250071509', '00-250130315', '00-250059948', '00-250130342', '26-25-1000081', '00-450083522', '00-360000214', '00-450083419', '00-450083417', '00-450083461', '00-250130369', '06-25-0068189', '00-251009804', '00-250130336', '00-250130370', '00-250130332', '00-250071465', '26-25-1008614', '06-25-0068150', '00-250130359', '00-250130358', '00-250130357', '00-250130356', '00-250130355', '00-250130376', '00-250130377', '06-25-0071151', '06-36-0000185', '06-36-0000203', '06-36-0000209', '00-250102644', '00-250102649', '00-360000015', '00-451000482', '00-451000433', '00-360000016', '00-450066877', '00-250071414', '00-251004064', '00-251009025', '00-250071196', '00-250071195', '00-250071155', '00-250071202', '00-360000317', '00-250071464', '00-250059775', '00-250059856', '00-450083470', '00-251009632', '06-25-0071153', '00-250059715', '00-251004431', '00-250059703', '00-250059816', '00-250071451', '00-250059707', '00-250059965', '00-250059839', '00-250059853', '00-250059859', '00-250068180', '00-250068149', '00-25-0071065', '00-250059848', '06-25-0071030', '06-25-0068197', '06-25-0068210', '00-250059807', '06-25-0071066', '06-25-0071061', '06-25-0071068', '06-25-0071091', '06-25-0071090', '06-25-0071074', '06-25-0071192', '00-250068165', '00-360000232', '00-250059954', '06-25-0071176', '00-250130368', '00-250071679', '00-250059851', '00-250059835', '00-250130379', '00-250071432', '00-250071442', '06-25-0071392', '06-25-0071310', '06-25-0071311', '06-25-0071336', '06-25-0071312', '00-250071469', '06-25-0068140', '06-25-0068175', '00-250071661', '00-250068177', '06-25-0068221', '06-25-0068188', 
'06-25-0068167', '06-25-0068227', '06-25-0068196', '06-25-0068154', '06-25-0068141', '06-25-0068135', '06-25-0068224', '00-250071625', '00-250059945', '00-250059928', '00-250059717', '00-250068159', '00-250130321', '00-250071510', '00-250130310', '00-250130316', '00-250071677', '00-250130317', '00-250130320', '00-250130314', '00-250071699', '00-250059917', '00-250059768', '00-250059758', '00-250059780', '00-250059959', '00-250059789', '00-250068161', '00-250059989', '00-250130382', '00-250102620', '06-25-0068157', '06-25-0068170', '06-25-0068143', '00-250130374', '00-250130340', '00-250130345', '00-250130344', '00-360000002', '00-250130339', '00-250059811', '00-250130327', '00-250059926', '00-250130343', '00-250059875', '00-250059852', '00-360000008', '00-250059801', '00-250059724', '00-250068231', '00-250068134', '00-250071622', '00-250071575', '00-250059701', '00-250059704', '00-250059823', '00-250059964', '00-250059975', '00-250059770', '00-250130383', '06-25-0071387', '00-250052085', '06-25-0071052', '00-250068203', '00-250059814', '00-250059784', '00-250059982', '00-250059710', '00-250130329', '00-250071581', '00-250130326', '00-250130306', '06-25-0071359', '06-25-0071394', '06-25-0071085', '06-25-0071073', '06-25-0071039', '06-25-0071096', '06-25-0071028', '06-25-0071071', '00-250070996', '00-250071547', '00-25-0059761', '06-25-0071183', '06-25-0071184', '06-25-0071190', '06-25-0071185', '06-25-0071189', '00-250071416', '06-25-0071144', '06-25-0071126', '06-25-0071122', '06-36-0000216', '06-25-0071004', '06-25-0071121', '06-25-0071138', '06-25-0071140', '06-25-0071172', '06-25-0071188', '06-25-0071368', '06-25-0071319', '06-25-0071171', '06-25-0071170', '00-250071549', '06-25-0071370', '06-25-0071371', '26-25-1008570', '00-250130331', '00-250130338', '00-250071696', '00-250059947', '00-250059841', '00-250130323', '00-250130337', '00-250059854', '00-250059961', '06-25-0071181', '06-25-0071182', '06-25-0071372', '06-25-0071005', '06-25-0071290', 
'06-25-0071355', '06-25-0071191', '06-25-0071175', '00-250068232']
led_list = ['00-250068136', '00-250071518', '00-250102669', '00-250124842', '00-250071520', '00-250068182', '00-250071525', '06-25-0068185', '00-250071427', '00-450083432', '00-250068169', '00-250068178', '00-250059834', '00-250070991', '00-250071407', '00-250071409', '00-250071428', '06-25-0071364', '06-25-0071217', '00-250071548', '00-250059976', '06-25-0068176', '00-250071459', '00-250059912', '00-250059913', '00-360000037', '00-360000154', '00-250130308', '00-360000241', '00-250130333', '00-250130341', '06-25-0071086', '00-250059909', '06-25-0071072', '06-25-0071167', '06-25-0071026', '06-25-0071032', '00-450083448', '00-450083403', '00-450083446', '00-450083404', '00-450083445', '00-450083458', '00-250071700', '00-250071667', '00-450083468', '00-250102673', '00-450083471', '06-25-0071104', '06-25-0071098', '06-25-0071109', '06-25-0071166', '06-36-0000218', '00-250071659', '06-25-0068164', '00-250068171', '00-250059706', '00-250068207', '00-250068193', '00-250059925', '00-460000184', '00-250070993', '00-250059914', '00-250130372', '00-250130373', '00-250060002', '00-250071550', '00-250071481', '00-250070992', '00-250068225', '06-25-0071186', '06-25-0068144', '00-450083559', '00-450086595', '00-450086518', '00-450086594', '00-451000364', '00-450086520', '00-451000347', '00-451000312', '00-451000263', '00-450086593', '00-450086534', '00-450086521', '00-450083489', '00-450083492', '00-450083431', '00-450086519', '00-450086529', '00-450086505', '00-450083472', '00-250059812', '00-360000245', '00-450083490', '00-450086510', '00-451000476', '00-451000379', '00-250071408', '00-451000369', '00-450083491', '00-451000293', '00-451000483', '00-450083430', '00-451000262', '00-451000485', '00-451000266', '00-451000392', '00-250071678', '00-250059887', '00-250071582', '00-250059873', '00-250071681', '00-460003531', '06-25-0071057', '06-25-0071012', '00-250060022', '06-25-0068215', '06-25-0068202', '00-250071417', '00-250071624', '00-250060000', '00-450083463', 
'00-250068228', '00-250059999', '00-250059956', '06-25-0068198', '06-25-0068139', '06-25-0068153', '00-250059997', '06-25-0068218', '00-250071559', '00-250130312', '00-250130313', '00-250059905', '00-250059906', '00-250071467', '00-251004421', '00-250059754', '00-250071694', '01221321192-0101', '01221321192-0102', '00-50151143003436', '00-250059813', '06-25-0068192', '00-250059849', '00-250059998', '06-25-0071059', '06-25-0071165', '00-250130375', '06-25-0068212', '06-25-0068201', '00-250059765', '00-250059836', '00-250071647', '00-250059803', '00-250059705', '00-450086512', '00-250059861', '00-450083429', '00-250130335', '06-25-0071031', '00-250130363', '00-250130362', '00-250130367', '00-250130361', '00-250130365', '00-250102710', '00-250130378', '00-250130384', '06-25-0068146', '06-25-0068148', '06-25-0071097', '06-25-0068145', '06-36-0000177', '00-250059874', '00-250068217', '00-250068208', '00-250102696', '00-450086503', '00-450086517', '00-450086528', '00-450086506', '00-250068163', '00-250059778', '00-250059777', '06-25-0071055', '06-25-0071089', '00-250060001', '00-250130385', '00-250071424', '00-250059718', '00-250059766', '00-250059920', '00-250068174', '00-250130351', '00-250059871', '00-250059708', '00-250059960', '00-251003141', '00-251004437', '00-250059709', '00-250052077', '00-250102776', '06-25-0071373', '00-250071403', '00-250071434', '00-250068229', '00-250068230', '06-25-0071308', '06-25-0071330', '06-25-0071218', '06-25-0071356', '26-25-1004742', '06-25-0071357', '00-250071405', '00-250059783', '00-250059720', '06-25-0068226', '06-25-0068220', '00-250059949', '00-250059921', '00-250068162', '00-250068179', '00-250068172', '00-250068173', '00-250068158', '00-250059751', '00-250068160', '00-250059992', '00-250059855', '00-250071612', '00-250068071', '00-250070994', '00-250059719', '06-25-0059916', '06-25-0059915', '00-250068166', '00-250071495', '00-250059968', '00-250071692', '00-250130325', '00-250130324', '00-250071540', '00-250071531', 
'00-250071493', '00-250071557', '00-250071527', '00-250071521', '00-250071558', '00-250059819', '00-250059957', '00-250102801', '00-250102796', '00-250102794', '00-250102797', '00-250130380', '06-25-0068219', '06-25-0068137', '00-250130381', '00-450083564', '00-450083563', '00-450083561', '00-450083562', '00-250071640', '00-250071632', '00-250059774', '00-250007125', '00-250059860', '00-250059858', '00-250130307', '00-250059798', '00-250102798', '00-250102800', '00-250102795', '00-250071438', '00-250071507', '00-250071504', '00-250071440', '00-250071449', '00-250071453', '00-451000232', '00-250130309', '00-250059996', '00-250071468', '00-250070999', '06-25-0071088', '06-25-0071029', '06-25-0071168', '06-25-0071099', '06-25-0071369', '00-360000014', '00-360000012', '00-250007109', '00-250102648', '06-25-0071087', '06-25-0071025', '00-250059818', '00-250130318', '00-250071509', '00-250130315', '00-250059948', '00-250130342', '26-25-1000081', '00-250130369', '06-25-0068189', '00-251009804', '00-250130336', '00-250130370', '00-250130332', '00-250071465', '26-25-1008614', '06-25-0068150', '00-250130359', '00-250130358', '00-250130357', '00-250130356', '00-250130355', '00-250130376', '00-250130377', '06-25-0071151', '00-250102644', '00-250102649', '00-360000015', '00-451000482', '00-451000433', '00-360000016', '00-450066877', '00-250071414', '00-251004064', '00-251009025', '00-250071196', '00-250071195', '00-250071155', '00-250071202', '00-360000317', '00-250071464', '00-250059775', '00-250059856', '00-251009632', '06-25-0071153', '00-250059715', '00-251004431', '00-250059703', '00-250059816', '00-250071451', '00-250059707', '00-250059965', '00-250059839', '00-250059853', '00-250059859', '00-250068180', '00-250068149', '00-25-0071065', '00-250059848', '06-25-0071030', '06-25-0068197', '06-25-0068210', '00-250059807', '06-25-0071066', '06-25-0071061', '06-25-0071068', '06-25-0071091', '06-25-0071090', '06-25-0071074', '06-25-0071192', '00-250068165', '00-250059954', 
'06-25-0071176', '00-250130368', '00-250071679', '00-250059851', '00-250059835', '00-250130379', '00-250071432', '00-250071442', '06-25-0071392', '06-25-0071310', '06-25-0071311', '06-25-0071336', '06-25-0071312', '00-250071469', '06-25-0068140', '06-25-0068175', '00-250071661', '00-250068177', '06-25-0068221', '06-25-0068188', '06-25-0068167', '06-25-0068227', '06-25-0068196', '06-25-0068154', '06-25-0068141', '06-25-0068135', '06-25-0068224', '00-250071625', '00-250059945', '00-250059928', '00-250059717', '00-250068159', '00-250130321', '00-250071510', '00-250130310', '00-250130316', '00-250071677', '00-250130317', '00-250130320', '00-250130314', '00-250071699', '00-250059917', '00-250059768', '00-250059758', '00-250059780', '00-250059959', '00-250059789', '00-250068161', '00-250059989', '00-250130382', '00-250102620', '06-25-0068157', '06-25-0068170', '06-25-0068143', '00-250130374', '00-250130340', '00-250130345', '00-250130344', '00-250130339', '00-250059811', '00-250130327', '00-250059926', '00-250130343', '00-250059875', '00-250059852', '00-250059801', '00-250059724', '00-250068231', '00-250068134', '00-250071622', '00-250071575', '00-250059701', '00-250059704', '00-250059823', '00-250059964', '00-250059975', '00-250059770', '00-250130383', '06-25-0071387', '00-250052085', '06-25-0071052', '00-250068203', '00-250059814', '00-250059784', '00-250059982', '00-250059710', '00-250130329', '00-250071581', '00-250130326', '00-250130306', '06-25-0071359', '06-25-0071394', '06-25-0071085', '06-25-0071073', '06-25-0071039', '06-25-0071096', '06-25-0071028', '06-25-0071071', '00-250070996', '00-250071547', '00-25-0059761', '06-25-0071183', '06-25-0071184', '06-25-0071190', '06-25-0071185', '06-25-0071189', '00-250071416', '06-25-0071144', '06-25-0071126', '06-25-0071122', '06-36-0000216', '06-25-0071004', '06-25-0071121', '06-25-0071138', '06-25-0071140', '06-25-0071172', '06-25-0071188', '06-25-0071368', '06-25-0071319', '06-25-0071171', '06-25-0071170', 
'00-250071549', '06-25-0071370', '06-25-0071371', '26-25-1008570', '00-250130331', '00-250130338', '00-250071696', '00-250059947', '00-250059841', '00-250130323', '00-250130337', '00-250059854', '00-250059961', '06-25-0071181', '06-25-0071182', '06-25-0071372', '06-25-0071005', '06-25-0071290', '06-25-0071355', '06-25-0071191', '06-25-0071175', '00-250068232']
inv_list = ['01223192096-0001', '00-360000221', '00-360000201', '01223187332-0001', '00-360000247', '00-360000020', '00-360000283', '00-360000244', '00-360000238', '06-36-0000175', '06-36-0000194', '06-36-0000152', '06-36-0000156', '00-360000233', '00-360000236', '00-360000025', '00-360000165', '00-360000173', '00-360000237', '00-360000250', '00-360000077', '00-360000092', '00-360000031', '06-36-0000228', '06-36-0000225', '00-360000032', '00-360000007', '00-360000229', '00-360000086', '06-36-0000191', '06-36-0000196', '06-36-0000107', '06-36-0000093', '06-36-0000132', '00-450083524', '00-450083462', '06-36-0000182', '06-36-0000231', '00-360000005', '00-360000234', '00-450083464', '00-360000264', '00-360000090', '06-36-0000202', '06-36-0000205', '00-360000224', '01223187357-0001', '06-36-0000157', '06-36-0000159', '06-36-0000172', '06-36-0000199', '06-36-0000187', '06-36-0000155', '06-36-0000160', '00-360000281', '00-360000258', '06-36-0000222', '06-36-0000183', '06-36-0000217', '06-36-0000197', '00-25007091', '00-25007234', '06-36-0000193', '06-36-0000181', '06-36-0000166', '06-36-0000153', '06-36-0000170', '00-450086602', '06-36-0000004', '06-36-0000127', '00-360000208', '00-360000212', '00-360000242', '00-360000243', '00-360000262', '00-360000260', '00-360000259', '00-360000213', '06-36-0000184', '06-36-0000189', '00-360000003', '00-450086604', '00-450083467', '00-450083466', '00-450083465', '00-450083469', '00-360000282', '00-360000235', '00-360000006', '00-360000252', '00-360000001', '00-450083522', '00-360000214', '00-450083419', '00-450083417', '00-450083461', '06-36-0000185', '06-36-0000203', '06-36-0000209', '00-450083470', '00-360000232', '00-360000002', '00-360000008']
good_list = ['00-250068136', '00-250071518', '00-250071520', '00-250071525', '00-250071548', '00-250130308', '00-360000241', '00-250130333', '00-250059909', '00-450083448', '00-450083403', '00-450083446', '00-450083404', '00-450083445', '00-450083458', '00-450083471', '00-360000031', '06-36-0000191', '06-36-0000196', '00-250071659', '00-250059706', '00-250059925', '00-460000184', '00-250060002', '06-25-0071186', '00-450083559', '00-450086595', '00-450086518', '00-450086594', '00-451000364', '00-450086520', '00-451000347', '00-451000312', '00-451000263', '00-450086593', '00-450086534', '00-450086521', '00-450083489', '00-450083492', '00-450083431', '00-450086519', '00-450086529', '00-450086505', '00-450083472', '00-360000245', '00-450083490', '00-450086510', '00-451000476', '00-451000379', '00-451000369', '00-450083491', '00-451000483', '00-450083430', '00-451000262', '00-451000485', '00-451000266', '00-451000392', '00-250060000', '00-450083463', '00-250059999', '00-250059956', '06-25-0068198', '06-25-0068139', '06-25-0068153', '00-250059997', '06-25-0068218', '00-250130312', '00-250130313', '00-360000262', '00-251004421', '00-250059813', '00-250059849', '00-250059998', '06-25-0071059', '06-25-0071165', '00-450083429', '00-250130384', '00-250071495', '00-250059819', '00-250130381', '00-450083564', '00-450083563', '00-450083561', '00-450083562', '00-360000014', '00-360000012', '00-250007109', '00-250102648', '06-25-0068189', '00-251009804', '00-250130336', '00-250130370', '00-250130332', '00-250130359', '00-250130358', '00-250130357', '00-250130356', '00-250130355', '00-250130376', '00-250130377', '00-360000015', '00-451000482', '00-451000433', '00-360000016', '00-450066877', '00-251004064', '00-251009025', '00-250071196', '00-250071195', '00-250071155', '00-250071202', '00-360000317', '00-250071464', '00-250059775', '00-251009632', '00-250059715', '00-250059703', '00-250059816', '00-250059707', '00-250059965', '00-250059839', '00-250059859', '00-250068180', 
'00-25-0071065', '00-250059848', '06-25-0071030', '00-250059807', '06-25-0071061', '06-25-0071068', '06-25-0071090', '06-25-0071074', '06-25-0071192', '00-250068165', '00-250059954', '06-25-0071176', '00-250059851', '00-250059835', '00-250130379', '00-250071432', '00-250071442', '00-250071469', '06-25-0068140', '06-25-0068175', '06-25-0068221', '06-25-0068188', '06-25-0068167', '06-25-0068227', '06-25-0068196', '06-25-0068154', '06-25-0068141', '06-25-0068135', '06-25-0068224', '00-250059945', '00-250059928', '00-250071510', '00-250130310', '00-250130316', '00-250071677', '00-250130317', '00-250130320', '00-250130314', '00-250071699', '00-250059917', '00-250059768', '00-250059758', '00-250059780', '00-250059959', '00-250059789', '00-250068161', '00-250059989', '00-250130382', '00-250102620', '06-25-0068157', '06-25-0068170', '06-25-0068143', '00-250130340', '00-250130345', '00-250130344', '00-250130339', '00-250130327', '00-250059926', '00-250130343', '00-250059875', '00-250059701', '00-250059704', '06-25-0071387', '00-250052085', '06-25-0071052', '00-250059784', '00-250059982', '00-250068182', '06-25-0068185', '00-250071427', '01223192096-0001', '00-360000221', '00-360000201', '01223187332-0001', '00-250068169', '00-250068178', '00-250059834', '00-250070991', '00-250071407', '00-250071409', '00-250071428', '00-360000247', '00-250059976', '06-25-0068176', '00-250071459', '00-360000020', '00-360000283', '00-360000244', '00-360000238', '06-36-0000175', '06-36-0000194', '06-36-0000152', '06-36-0000156', '00-360000233', '00-360000236', '00-360000025', '00-360000165', '00-250130341', '06-25-0071086', '00-360000173', '06-25-0071072', '06-25-0071167', '06-25-0071026', '06-25-0071032', '00-360000237', '00-360000250', '00-250071700', '00-250071667', '00-360000077', '00-450083468', '00-250102673', '00-360000092', '06-36-0000228', '06-36-0000225', '00-360000032', '00-360000007', '00-360000229', '06-25-0071104', '06-25-0071098', '06-25-0071109', '06-25-0071166', 
'00-360000086', '06-36-0000218', '06-36-0000107', '06-36-0000093', '06-36-0000132', '00-450083524', '06-25-0068164', '06-36-0000182', '06-36-0000231', '00-250068171', '00-360000005', '00-250068207', '00-250068193', '00-360000234', '00-450083464', '00-360000264', '00-250070993', '00-360000090', '06-36-0000202', '06-36-0000205', '00-250130372', '00-250130373', '00-250071550', '00-250071481', '00-250070992', '00-360000224', '01223187357-0001', '00-250068225', '06-25-0068144', '00-250059812', '06-36-0000157', '06-36-0000159', '06-36-0000172', '06-36-0000199', '06-36-0000187', '06-36-0000155', '06-36-0000160', '00-360000281', '00-360000258', '06-36-0000222', '06-36-0000183', '06-36-0000217', '06-36-0000197', '00-250071408', '06-36-0000193', '06-36-0000181', '06-36-0000166', '06-36-0000153', '06-36-0000170', '00-450086602', '06-36-0000004', '06-36-0000127', '00-250071678', '00-250059887', '00-250071582', '00-250059873', '00-250071681', '00-460003531', '06-25-0071057', '06-25-0071012', '00-250060022', '06-25-0068215', '06-25-0068202', '00-360000208', '00-250071624', '00-360000212', '00-360000242', '00-360000243', '00-250071559', '00-250059905', '00-250059906', '00-250071467', '00-250059754', '00-250071694', '00-360000260', '00-360000259', '06-25-0068192', '00-360000213', '06-36-0000184', '06-36-0000189', '00-250130375', '06-25-0068212', '06-25-0068201', '00-250059765', '00-250059836', '00-250071647', '00-360000003', '00-250059803', '00-250059705', '00-450086512', '00-450086604', '00-450083467', '00-450083466', '00-450083465', '00-250059861', '00-250130335', '06-25-0071031', '00-250130363', '00-250130362', '00-250130367', '00-250130361', '00-250130365', '00-450083469', '00-250102710', '00-250130378', '00-360000282', '00-360000235', '06-25-0068146', '06-25-0068148', '06-25-0071097', '06-25-0068145', '06-36-0000177', '00-250059874', '00-250068217', '00-250068208', '00-250102696', '00-450086503', '00-450086517', '00-450086528', '00-450086506', '00-250059778', '00-250059777', 
'06-25-0071055', '06-25-0071089', '00-250060001', '00-250130385', '00-250071424', '00-250059718', '00-250059766', '00-250059920', '00-250068174', '00-250130351', '00-250059871', '00-250059708', '00-250059960', '00-251003141', '00-251004437', '00-250059709', '00-250052077', '00-250102776', '06-25-0071373', '00-250071403', '00-250071434', '00-250068229', '00-250068230', '06-25-0071308', '06-25-0071330', '06-25-0071218', '06-25-0071356', '26-25-1004742', '06-25-0071357', '00-250071405', '00-250059783', '00-250059720', '06-25-0068226', '06-25-0068220', '00-250059949', '00-250059921', '00-250068162', '00-250068179', '00-250068172', '00-250068173', '00-250068158', '00-250059751', '00-250068160', '00-250059992', '00-250059855', '00-250071612', '00-250068071', '00-250070994', '00-250059719', '06-25-0059916', '06-25-0059915', '00-250068166', '00-250059968', '00-250071692', '00-250130325', '00-250130324', '00-250071540', '00-250071531', '00-250071493', '00-250071557', '00-250071527', '00-250071521', '00-250071558', '00-360000006', '06-25-0068219', '06-25-0068137', '00-250071640', '00-250071632', '00-250059774', '00-250007125', '00-250059860', '00-250059858', '00-250130307', '00-250059798', '00-250071438', '00-250071507', '00-250071504', '00-360000252', '00-250071440', '00-250071449', '00-250071453', '00-451000232', '00-250130309', '00-360000001', '00-250059996', '00-250071468', '00-250070999', '06-25-0071088', '06-25-0071029', '06-25-0071168', '06-25-0071099', '06-25-0071369', '06-25-0071087', '06-25-0071025', '00-250059818', '00-250130318', '00-250071509', '00-250130315', '00-250059948', '00-250130342', '26-25-1000081', '00-450083522', '00-360000214', '00-450083419', '00-450083417', '00-450083461', '00-250130369', '00-250071465', '26-25-1008614', '06-25-0068150', '06-25-0071151', '06-36-0000185', '06-36-0000203', '06-36-0000209', '00-250102644', '00-250102649', '00-250071414', '00-250059856', '06-25-0071153', '00-251004431', '00-250071451', '00-250059853', '00-250068149', 
'06-25-0068197', '06-25-0068210', '06-25-0071091', '00-360000232', '00-250130368', '00-250071679', '06-25-0071392', '06-25-0071310', '06-25-0071311', '06-25-0071336', '06-25-0071312', '00-250071661', '00-250068177', '00-250071625', '00-250059717', '00-250068159', '00-250130374', '00-360000002', '00-250059811', '00-250059852', '00-360000008', '00-250059801', '00-250059724', '00-250068231', '00-250068134', '00-250071622', '00-250071575', '00-250059823', '00-250059964', '00-250059975', '00-250059770', '00-250130383', '00-250068203', '00-250059814', '00-250071581', '00-250130326', '00-250130306', '06-25-0071359', '06-25-0071394', '06-25-0071085', '06-25-0071073', '06-25-0071039', '06-25-0071096', '06-25-0071028', '06-25-0071071', '00-250070996', '00-250071547', '00-25-0059761', '06-25-0071183', '06-25-0071184', '06-25-0071190', '06-25-0071185', '06-25-0071189', '00-250071416', '06-25-0071144', '06-25-0071126', '06-25-0071122', '06-36-0000216', '06-25-0071004', '06-25-0071121', '06-25-0071138', '06-25-0071140', '06-25-0071172', '06-25-0071188', '06-25-0071368', '06-25-0071319', '06-25-0071171', '06-25-0071170', '00-250071549', '06-25-0071370', '06-25-0071371', '26-25-1008570', '00-250130331', '00-250130338', '00-250071696', '00-250059947', '00-250059841', '00-250130323', '00-250059854', '00-250059961', '06-25-0071181', '06-25-0071182', '06-25-0071372', '06-25-0071005', '06-25-0071290', '06-25-0071355', '06-25-0071191', '06-25-0071175', '00-250068232', '00-450083432', '00-451000293']
# NOTE(review): the five tables below are machine-generated ID lists (values
# look like external record identifiers, e.g. '00-250068136', '06-25-0071364').
# The semantics of each list name (bad / nuri / db / unique / non_unique) are
# not evident from this chunk — confirm against the code that consumes them.
# Do not hand-edit individual entries; regenerate from the upstream source.
bad_list = ['00-250102669', '00-250124842', '06-25-0071364', '06-25-0071217', '00-250059912', '00-250059913', '00-360000037', '00-360000154', '00-450083462', '00-250059914', '00-25007091', '00-25007234', '00-250071417', '00-250068228', '01221321192-0101', '01221321192-0102', '00-50151143003436', '00-250068163', '00-250059957', '00-250102801', '00-250102796', '00-250102794', '00-250102797', '00-250130380', '00-250102798', '00-250102800', '00-250102795', '00-450083470', '06-25-0071066', '00-250130321', '00-250059710', '00-250130329', '00-250130337']
nuri_list = ['00-250068136', '00-250071518', '00-250071520', '00-250068182', '00-250071525', '06-25-0068185', '00-250071427', '00-250068169', '00-250068178', '00-250059834', '00-250070991', '00-250071407', '00-250071409', '00-250071428', '00-360000247', '06-25-0071364', '06-25-0071217', '00-250071548', '00-250059976', '06-25-0068176', '00-250071459', '00-250130308', '00-360000241', '00-360000283', '00-360000244', '00-360000238', '06-36-0000175', '06-36-0000194', '06-36-0000152', '06-36-0000156', '00-360000233', '00-360000236', '00-250130341', '06-25-0071086', '06-25-0071072', '06-25-0071167', '06-25-0071026', '06-25-0071032', '00-360000237', '00-360000250', '00-450083448', '00-450083403', '00-450083446', '00-450083404', '00-450083445', '00-450083458', '00-250071700', '00-250071667', '06-36-0000228', '06-36-0000225', '06-25-0071104', '06-25-0071098', '06-25-0071109', '06-25-0071166', '06-36-0000218', '06-36-0000191', '06-36-0000196', '00-250071659', '06-36-0000107', '06-36-0000093', '06-36-0000132', '00-450083524', '00-450083462', '06-25-0068164', '06-36-0000182', '06-36-0000231', '00-250068171', '00-250059706', '00-250068207', '00-250068193', '00-360000234', '00-450083464', '00-250059925', '00-360000264', '00-250070993', '06-36-0000202', '06-36-0000205', '00-250060002', '00-250071550', '00-250071481', '00-250070992', '00-250068225', '06-25-0071186', '06-25-0068144', '00-250059812', '06-36-0000157', '06-36-0000159', '06-36-0000172', '00-360000245', '06-36-0000199', '06-36-0000187', '06-36-0000155', '06-36-0000160', '00-360000281', '00-360000258', '06-36-0000222', '06-36-0000183', '06-36-0000217', '06-36-0000197', '00-250071408', '06-36-0000193', '06-36-0000181', '06-36-0000166', '06-36-0000153', '06-36-0000170', '00-450086602', '06-36-0000004', '06-36-0000127', '00-250071678', '00-250059887', '00-250071582', '00-250059873', '00-250071681', '00-460003531', '06-25-0071057', '06-25-0071012', '00-250060022', '06-25-0068215', '06-25-0068202', '00-250071417', 
'00-250071624', '00-250060000', '00-450083463', '00-250059999', '00-250059956', '06-25-0068198', '06-25-0068139', '06-25-0068153', '00-360000242', '00-360000243', '00-250059997', '06-25-0068218', '00-250071559', '00-250130312', '00-250130313', '00-360000262', '00-250071467', '00-250059754', '00-250071694', '00-360000260', '00-360000259', '00-250059813', '06-25-0068192', '00-250059849', '00-250059998', '06-25-0071059', '06-25-0071165', '06-36-0000184', '06-36-0000189', '00-250130375', '06-25-0068212', '06-25-0068201', '00-250059765', '00-250059836', '00-250071647', '00-250059803', '00-250059705', '00-450086512', '00-450086604', '00-450083467', '00-450083466', '00-450083465', '00-250059861', '00-250130335', '06-25-0071031', '00-250130363', '00-250130362', '00-250130367', '00-250130361', '00-250130365', '00-450083469', '00-250102710', '00-250130378', '00-250130384', '00-360000282', '00-360000235', '06-25-0068146', '06-25-0068148', '06-25-0071097', '06-25-0068145', '06-36-0000177', '00-250059874', '00-250068217', '00-250068208', '00-450086503', '00-450086517', '00-450086528', '00-450086506', '00-250059778', '00-250059777', '06-25-0071055', '06-25-0071089', '00-250060001', '00-250130385', '00-250071424', '00-250059718', '00-250059766', '00-250059871', '00-250059708', '00-250059960', '00-250059709', '00-250052077', '00-250102776', '06-25-0071373', '00-250071403', '00-250071434', '00-250068229', '00-250068230', '06-25-0071308', '06-25-0071330', '06-25-0071218', '06-25-0071356', '26-25-1004742', '06-25-0071357', '00-250071405', '00-250059783', '00-250059720', '06-25-0068226', '06-25-0068220', '00-250059949', '00-250059921', '00-250068162', '00-250068179', '00-250068172', '00-250068173', '00-250068158', '00-250059751', '00-250068160', '00-250059992', '00-250059855', '00-250071612', '00-250068071', '00-250070994', '00-250059719', '06-25-0059916', '06-25-0059915', '00-250068166', '00-250071495', '00-250059968', '00-250071692', '00-250130325', '00-250130324', '00-250071540', 
'00-250071531', '00-250071493', '00-250071557', '00-250071527', '00-250071521', '00-250071558', '00-250059819', '00-250059957', '00-250130380', '06-25-0068219', '06-25-0068137', '00-250130381', '00-250071640', '00-250071632', '00-250059774', '00-250007125', '00-250059860', '00-250059858', '00-250130307', '00-250059798', '00-250071438', '00-250071507', '00-250071504', '00-360000252', '00-250071440', '00-250071449', '00-250071453', '00-250130309', '00-250059996', '00-250071468', '00-250070999', '06-25-0071088', '06-25-0071029', '06-25-0071168', '06-25-0071099', '06-25-0071369', '00-250102648', '06-25-0071087', '06-25-0071025', '00-250059818', '00-250130318', '00-250071509', '00-250130315', '00-250059948', '00-250130342', '26-25-1000081', '00-450083522', '00-450083419', '00-450083417', '00-450083461', '00-250130369', '06-25-0068189', '00-250071465', '26-25-1008614', '06-25-0068150', '00-250130359', '00-250130358', '00-250130357', '00-250130356', '00-250130355', '00-250130376', '00-250130377', '06-25-0071151', '06-36-0000185', '06-36-0000203', '06-36-0000209', '00-250102644', '00-250102649', '00-250071414', '00-250071464', '00-250059775', '00-250059856', '00-450083470', '06-25-0071153', '00-250059715', '00-250059703', '00-250059816', '00-250071451', '00-250059707', '00-250059965', '00-250059839', '00-250059853', '00-250059859', '00-250068180', '00-250068149', '00-25-0071065', '00-250059848', '06-25-0071030', '06-25-0068197', '06-25-0068210', '00-250059807', '06-25-0071066', '06-25-0071061', '06-25-0071068', '06-25-0071091', '06-25-0071090', '06-25-0071074', '06-25-0071192', '00-250068165', '00-360000232', '00-250059954', '06-25-0071176', '00-250130368', '00-250071679', '00-250059851', '00-250059835', '00-250130379', '00-250071432', '00-250071442', '06-25-0071392', '06-25-0071310', '06-25-0071311', '06-25-0071336', '06-25-0071312', '00-250071469', '06-25-0068140', '06-25-0068175', '00-250071661', '00-250068177', '06-25-0068221', '06-25-0068188', '06-25-0068167', 
'06-25-0068227', '06-25-0068196', '06-25-0068154', '06-25-0068141', '06-25-0068135', '06-25-0068224', '00-250071625', '00-250059945', '00-250059928', '00-250059717', '00-250068159', '00-250130321', '00-250071510', '00-250130310', '00-250130316', '00-250071677', '00-250130317', '00-250130320', '00-250130314', '00-250071699', '00-250059917', '00-250059768', '00-250059758', '00-250059780', '00-250059959', '00-250059789', '00-250068161', '00-250059989', '00-250130382', '00-250102620', '06-25-0068157', '06-25-0068170', '06-25-0068143', '00-250130374', '00-250130340', '00-250130345', '00-250130344', '00-250130339', '00-250059811', '00-250130327', '00-250059926', '00-250130343', '00-250059875', '00-250059852', '00-250059801', '00-250059724', '00-250068231', '00-250068134', '00-250071622', '00-250071575', '00-250059701', '00-250059704', '00-250059823', '00-250059964', '00-250059975', '00-250059770', '00-250130383', '06-25-0071387', '00-250052085', '06-25-0071052', '00-250068203', '00-250059814', '00-250059784', '00-250059982', '00-250071581', '00-250130326', '00-250130306', '06-25-0071359', '06-25-0071394', '06-25-0071085', '06-25-0071073', '06-25-0071039', '06-25-0071096', '06-25-0071028', '06-25-0071071', '00-250070996', '00-250071547', '00-25-0059761', '06-25-0071183', '06-25-0071184', '06-25-0071190', '06-25-0071185', '06-25-0071189', '00-250071416', '06-25-0071144', '06-25-0071126', '06-25-0071122', '06-36-0000216', '06-25-0071004', '06-25-0071121', '06-25-0071138', '06-25-0071140', '06-25-0071172', '06-25-0071188', '06-25-0071368', '06-25-0071319', '06-25-0071171', '06-25-0071170', '00-250071549', '06-25-0071370', '06-25-0071371', '26-25-1008570', '00-250130331', '00-250130338', '00-250071696', '00-250059947', '00-250059841', '00-250130323', '00-250059854', '00-250059961', '06-25-0071181', '06-25-0071182', '06-25-0071372', '06-25-0071005', '06-25-0071290', '06-25-0071355', '06-25-0071191', '06-25-0071175', '00-250068232']
db_list = ['00-250102669', '00-250124842', '00-450083432', '00-250059912', '00-250059913', '00-360000037', '00-360000154', '00-250130333', '00-250059909', '00-450083468', '00-250102673', '00-450083471', '00-460000184', '00-250059914', '00-250130372', '00-250130373', '00-450083559', '00-450086595', '00-450086518', '00-450086594', '00-451000364', '00-450086520', '00-451000347', '00-451000312', '00-451000263', '00-450086593', '00-450086534', '00-450086521', '00-450083489', '00-450083492', '00-450083431', '00-450086519', '00-450086529', '00-450086505', '00-450083472', '00-450083490', '00-25007091', '00-25007234', '00-450086510', '00-451000476', '00-451000379', '00-451000369', '00-450083491', '00-451000293', '00-451000483', '00-450083430', '00-451000262', '00-451000485', '00-451000266', '00-451000392', '00-250068228', '00-250059905', '00-250059906', '00-251004421', '01221321192-0101', '01221321192-0102', '00-50151143003436', '00-450083429', '00-250102696', '00-250068163', '00-250059920', '00-250068174', '00-250130351', '00-251003141', '00-251004437', '00-250102801', '00-250102796', '00-250102794', '00-250102797', '00-450083564', '00-450083563', '00-450083561', '00-450083562', '00-250102798', '00-250102800', '00-250102795', '00-451000232', '00-360000014', '00-360000012', '00-250007109', '00-251009804', '00-250130336', '00-250130370', '00-250130332', '00-360000015', '00-451000482', '00-451000433', '00-360000016', '00-450066877', '00-251004064', '00-251009025', '00-250071196', '00-250071195', '00-250071155', '00-250071202', '00-360000317', '00-251009632', '00-251004431', '00-250059710', '00-250130329', '00-250130337', '01223192096-0001', '00-360000221', '00-360000201', '01223187332-0001', '00-360000020', '00-360000025', '00-360000165', '00-360000173', '00-360000077', '00-360000092', '00-360000031', '00-360000032', '00-360000007', '00-360000229', '00-360000086', '00-360000005', '00-360000090', '00-360000224', '01223187357-0001', '00-360000208', '00-360000212', 
'00-360000213', '00-360000003', '00-360000006', '00-360000001', '00-360000214', '00-360000002', '00-360000008']
unique_list = ['00-250068136', '00-250071518', '00-250071520', '00-250068182', '00-250071525', '06-25-0068185', '00-250071427', '00-450083432', '00-250068169', '00-250068178', '00-250059834', '00-360000247', '00-250071548', '00-250059976', '06-25-0068176', '00-250071459', '00-250130308', '00-360000241', '00-360000020', '00-360000025', '00-360000165', '00-250130333', '00-250130341', '06-25-0071086', '00-250059909', '00-360000173', '00-360000077', '00-450083468', '00-250102673', '00-450083471', '00-360000092', '00-360000031', '00-360000032', '00-360000007', '00-360000229', '00-360000086', '06-36-0000218', '00-250071659', '00-450083524', '00-450083462', '06-25-0068164', '00-250068171', '00-360000005', '00-250059706', '00-360000234', '00-450083464', '00-250059925', '00-360000264', '00-460000184', '00-250070993', '00-360000090', '00-250059914', '00-250060002', '00-250070992', '00-250068225', '06-25-0071186', '06-25-0068144', '00-450083559', '00-450086595', '00-450086518', '00-450086594', '00-451000364', '00-450086520', '00-450086593', '00-450086534', '00-450086521', '00-450083489', '00-450083492', '00-450083431', '00-450086519', '00-450086529', '00-450086505', '00-450083472', '00-250059812', '00-360000245', '00-450083490', '00-450086510', '00-451000476', '00-451000379', '00-250071408', '00-451000369', '00-450083491', '00-451000293', '00-451000483', '00-450083430', '00-451000262', '00-451000485', '00-451000266', '00-451000392', '00-450086602', '00-250071681', '00-460003531', '00-250060022', '00-250071417', '00-360000208', '00-250071624', '00-250060000', '00-360000212', '00-450083463', '00-250068228', '00-250059999', '00-250059956', '00-250059997', '06-25-0068218', '00-250071559', '00-360000262', '00-250071467', '00-251004421', '00-360000260', '00-360000259', '00-250059813', '06-25-0068192', '00-360000213', '00-250059849', '00-250059998', '00-250130375', '00-250071647', '00-360000003', '00-250059803', '00-250059705', '00-450086512', '00-250059861', '00-450083429', 
'00-250130335', '06-25-0071031', '00-250130365', '00-450083469', '00-250102710', '00-250130378', '00-250130384', '06-36-0000177', '00-250059874', '00-250068217', '00-250068208', '00-250102696', '00-250068163', '06-25-0071055', '06-25-0071089', '00-250060001', '00-250130385', '00-250071424', '00-250059718', '00-250059766', '00-250059871', '00-250059708', '00-250059709', '00-250052077', '00-250102776', '06-25-0071373', '00-250071403', '00-250071434', '00-250068229', '00-250068230', '06-25-0071218', '06-25-0071356', '26-25-1004742', '06-25-0071357', '00-250071405', '00-250068172', '00-250068173', '00-250068158', '00-250059751', '00-250068071', '00-250070994', '00-250059719', '00-250068166', '00-250071495', '00-250059819', '00-250059957', '00-250130380', '00-360000006', '00-250130381', '00-250007125', '00-250130307', '00-250059798', '00-250071438', '00-360000252', '00-451000232', '00-250130309', '00-360000001', '00-250059996', '00-250071468', '00-250070999', '06-25-0071369', '00-250102648', '00-250059818', '00-250059948', '00-250130342', '26-25-1000081', '00-450083522', '00-360000214', '00-250130369', '06-25-0068189', '00-251009804', '00-250130336', '00-250130370', '00-250130332', '00-250071465', '06-25-0071151', '00-360000015', '00-250071414', '00-250071464', '00-250059775', '00-250059856', '00-450083470', '00-251009632', '06-25-0071153', '00-250059715', '00-251004431', '00-250059703', '00-250059816', '00-250071451', '00-250059707', '00-250059965', '00-250059839', '00-250059853', '00-250059859', '00-250068180', '00-250068149', '00-25-0071065', '00-250059848', '06-25-0071030', '00-250059807', '06-25-0071066', '06-25-0071091', '06-25-0071090', '06-25-0071074', '06-25-0071192', '00-250068165', '00-360000232', '00-250059954', '06-25-0071176', '00-250130368', '00-250071679', '00-250059851', '00-250059835', '00-250130379', '00-250071432', '00-250071442', '00-250071469', '06-25-0068140', '06-25-0068175', '06-25-0068167', '06-25-0068227', '06-25-0068196', '06-25-0068154', 
'06-25-0068224', '00-250071625', '00-250059945', '00-250059928', '00-250130321', '00-250071510', '00-250130310', '00-250130316', '00-250071677', '00-250130317', '00-250130320', '00-250130314', '00-250071699', '00-250059917', '00-250059768', '00-250059758', '00-250059780', '00-250059959', '00-250059789', '00-250130382', '00-250102620', '06-25-0068157', '06-25-0068170', '06-25-0068143', '00-250130374', '00-250130340', '00-250130345', '00-250130344', '00-360000002', '00-250130339', '00-250059811', '00-250130327', '00-250059926', '00-250130343', '00-250059875', '00-250059852', '00-360000008', '00-250130383', '06-25-0071387', '00-250052085', '06-25-0071052', '00-250059710', '00-250130329', '00-250071581', '00-25-0059761', '00-250071416', '06-36-0000216', '06-25-0071171', '06-25-0071170', '00-250071549', '26-25-1008570', '00-250130331', '00-250130338', '00-250071696', '00-250059947', '00-250059841', '00-250130323', '00-250130337', '00-250059854', '00-250059961', '06-25-0071372', '06-25-0071005', '00-250068232']
non_unique_list = ['00-250102669', '00-250124842', '01223192096-0001', '00-360000221', '00-360000201', '01223187332-0001', '00-250070991', '00-250071407', '00-250071409', '00-250071428', '06-25-0071364', '06-25-0071217', '00-250059912', '00-250059913', '00-360000037', '00-360000154', '00-360000283', '00-360000244', '00-360000238', '06-36-0000175', '06-36-0000194', '06-36-0000152', '06-36-0000156', '00-360000233', '00-360000236', '06-25-0071072', '06-25-0071167', '06-25-0071026', '06-25-0071032', '00-360000237', '00-360000250', '00-450083448', '00-450083403', '00-450083446', '00-450083404', '00-450083445', '00-450083458', '00-250071700', '00-250071667', '06-36-0000228', '06-36-0000225', '06-25-0071104', '06-25-0071098', '06-25-0071109', '06-25-0071166', '06-36-0000191', '06-36-0000196', '06-36-0000107', '06-36-0000093', '06-36-0000132', '06-36-0000182', '06-36-0000231', '00-250068207', '00-250068193', '06-36-0000202', '06-36-0000205', '00-250130372', '00-250130373', '00-250071550', '00-250071481', '00-360000224', '01223187357-0001', '00-451000347', '00-451000312', '00-451000263', '06-36-0000157', '06-36-0000159', '06-36-0000172', '06-36-0000199', '06-36-0000187', '06-36-0000155', '06-36-0000160', '00-360000281', '00-360000258', '06-36-0000222', '06-36-0000183', '06-36-0000217', '06-36-0000197', '00-25007091', '00-25007234', '06-36-0000193', '06-36-0000181', '06-36-0000166', '06-36-0000153', '06-36-0000170', '06-36-0000004', '06-36-0000127', '00-250071678', '00-250059887', '00-250071582', '00-250059873', '06-25-0071057', '06-25-0071012', '06-25-0068215', '06-25-0068202', '06-25-0068198', '06-25-0068139', '06-25-0068153', '00-360000242', '00-360000243', '00-250130312', '00-250130313', '00-250059905', '00-250059906', '00-250059754', '00-250071694', '01221321192-0101', '01221321192-0102', '00-50151143003436', '06-25-0071059', '06-25-0071165', '06-36-0000184', '06-36-0000189', '06-25-0068212', '06-25-0068201', '00-250059765', '00-250059836', '00-450086604', 
'00-450083467', '00-450083466', '00-450083465', '00-250130363', '00-250130362', '00-250130367', '00-250130361', '00-360000282', '00-360000235', '06-25-0068146', '06-25-0068148', '06-25-0071097', '06-25-0068145', '00-450086503', '00-450086517', '00-450086528', '00-450086506', '00-250059778', '00-250059777', '00-250059920', '00-250068174', '00-250130351', '00-250059960', '00-251003141', '00-251004437', '06-25-0071308', '06-25-0071330', '00-250059783', '00-250059720', '06-25-0068226', '06-25-0068220', '00-250059949', '00-250059921', '00-250068162', '00-250068179', '00-250068160', '00-250059992', '00-250059855', '00-250071612', '06-25-0059916', '06-25-0059915', '00-250059968', '00-250071692', '00-250130325', '00-250130324', '00-250071540', '00-250071531', '00-250071493', '00-250071557', '00-250071527', '00-250071521', '00-250071558', '00-250102801', '00-250102796', '00-250102794', '00-250102797', '06-25-0068219', '06-25-0068137', '00-450083564', '00-450083563', '00-450083561', '00-450083562', '00-250071640', '00-250071632', '00-250059774', '00-250059860', '00-250059858', '00-250102798', '00-250102800', '00-250102795', '00-250071507', '00-250071504', '00-250071440', '00-250071449', '00-250071453', '06-25-0071088', '06-25-0071029', '06-25-0071168', '06-25-0071099', '00-360000014', '00-360000012', '00-250007109', '06-25-0071087', '06-25-0071025', '00-250130318', '00-250071509', '00-250130315', '00-450083419', '00-450083417', '00-450083461', '26-25-1008614', '06-25-0068150', '00-250130359', '00-250130358', '00-250130357', '00-250130356', '00-250130355', '00-250130376', '00-250130377', '06-36-0000185', '06-36-0000203', '06-36-0000209', '00-250102644', '00-250102649', '00-451000482', '00-451000433', '00-360000016', '00-450066877', '00-251004064', '00-251009025', '00-250071196', '00-250071195', '00-250071155', '00-250071202', '00-360000317', '06-25-0068197', '06-25-0068210', '06-25-0071061', '06-25-0071068', '06-25-0071392', '06-25-0071310', '06-25-0071311', '06-25-0071336', 
'06-25-0071312', '00-250071661', '00-250068177', '06-25-0068221', '06-25-0068188', '06-25-0068141', '06-25-0068135', '00-250059717', '00-250068159', '00-250068161', '00-250059989', '00-250059801', '00-250059724', '00-250068231', '00-250068134', '00-250071622', '00-250071575', '00-250059701', '00-250059704', '00-250059823', '00-250059964', '00-250059975', '00-250059770', '00-250068203', '00-250059814', '00-250059784', '00-250059982', '00-250130326', '00-250130306', '06-25-0071359', '06-25-0071394', '06-25-0071085', '06-25-0071073', '06-25-0071039', '06-25-0071096', '06-25-0071028', '06-25-0071071', '00-250070996', '00-250071547', '06-25-0071183', '06-25-0071184', '06-25-0071190', '06-25-0071185', '06-25-0071189', '06-25-0071144', '06-25-0071126', '06-25-0071122', '06-25-0071004', '06-25-0071121', '06-25-0071138', '06-25-0071140', '06-25-0071172', '06-25-0071188', '06-25-0071368', '06-25-0071319', '06-25-0071370', '06-25-0071371', '06-25-0071181', '06-25-0071182', '06-25-0071290', '06-25-0071355', '06-25-0071191', '06-25-0071175']
| 2,787.333333
| 10,020
| 0.675995
| 7,020
| 50,172
| 4.829772
| 0.089886
| 0.074915
| 0.002212
| 0.002507
| 0.968589
| 0.947618
| 0.932871
| 0.901578
| 0.888217
| 0.859431
| 0
| 0.718808
| 0.061947
| 50,172
| 17
| 10,021
| 2,951.294118
| 0.001594
| 0
| 0
| 0
| 0
| 0
| 0.752966
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
7cae8af3ecf6ad14544e7ffca886edbec399e2ab
| 143,383
|
py
|
Python
|
vb2py/test_at_scale/testbadcodes.py
|
ceprio/xl_vb2py
|
899fec0301140fd8bd313e8c80b3fa839b3f5ee4
|
[
"BSD-3-Clause"
] | null | null | null |
vb2py/test_at_scale/testbadcodes.py
|
ceprio/xl_vb2py
|
899fec0301140fd8bd313e8c80b3fa839b3f5ee4
|
[
"BSD-3-Clause"
] | null | null | null |
vb2py/test_at_scale/testbadcodes.py
|
ceprio/xl_vb2py
|
899fec0301140fd8bd313e8c80b3fa839b3f5ee4
|
[
"BSD-3-Clause"
] | null | null | null |
import unittest
from vb2py.test_at_scale import file_tester
class Test_badcodes(file_tester.FileTester):
    # --- Generated regression tests: [Template]/Interface and [Template]/Forms ---
    # Each test feeds one VB6 source file through FileTester._testFile (defined
    # in vb2py.test_at_scale.file_tester; its pass/fail criteria are not visible
    # in this file). NOTE(review): paths are absolute to the original author's
    # machine (/Users/paul/...) — these tests cannot run elsewhere as-is.
    def test0(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Template]/Interface/IDestructor.cls')
    def test1(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Template]/Interface/IObject.cls')
    def test2(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Template]/Interface/IObjectFirend.cls')
    def test3(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Template]/Interface/IContructor.cls')
    def test4(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Template]/Forms/DirSelecter.frm')
    def test5(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Template]/Forms/About Dialog.frm')
    def test6(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Template]/Forms/Splash Screen.frm')
    def test7(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Template]/Forms/Dialog.frm')
    def test8(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Template]/Forms/dlgProperty.frm')
    def test9(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Template]/Forms/ODBC Log In.frm')
    # Generated tests: remaining [Template]/Forms samples plus the first
    # [Template]/Projects samples; each converts one file via _testFile.
    def test10(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Template]/Forms/StringMap Editor.frm')
    def test11(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Template]/Forms/Web Browser.frm')
    def test12(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Template]/Forms/Log in Dialog.frm')
    def test13(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Template]/Forms/Tip of the Day.frm')
    def test14(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Template]/Forms/Options Dialog.frm')
    def test15(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Template]/Forms/KeyValue Editor.frm')
    def test16(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Template]/Projects/dhtml.bas')
    def test17(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Template]/Projects/LProject.frm')
    def test18(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Template]/Projects/frmdaten.frm')
    def test19(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Template]/Projects/Form1.frm')
    # Generated tests: remaining [Template]/Projects samples, the
    # [Template]/Controls samples, and the first [Template]/Menus sample.
    def test20(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Template]/Projects/cHandle.cls')
    def test21(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Template]/Projects/LProjectMain.frm')
    def test22(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Template]/Projects/addin.frm')
    def test23(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Template]/Projects/LProjectMain.bas')
    def test24(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Template]/Projects/CApp.cls')
    def test25(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Template]/Projects/LProjectApp.cls')
    def test26(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Template]/Controls/Mover ListBox.frm')
    def test27(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Template]/Controls/Treeview Listview Splitter.frm')
    def test28(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Template]/Controls/Button ListBox.frm')
    def test29(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Template]/Menus/File Menu.frm')
    # Generated tests: remaining [Template]/Menus samples, then individual
    # project samples (netCanoe, PartitionTextFile, zipLoader, BookManager).
    def test30(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Template]/Menus/Window Menu.frm')
    def test31(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Template]/Menus/Edit Menu.frm')
    def test32(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Template]/Menus/Help Menu.frm')
    def test33(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Template]/Menus/View Menu.frm')
    def test34(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Template]/Menus/Explorer File Menu.frm')
    def test35(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/netCanoe/clsLakefront.cls')
    def test36(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/PartitionTextFile/mainFrm.frm')
    def test37(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/zipLoader/zipLoader.bas')
    def test38(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/BookManager/frmOptions.frm')
    def test39(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/BookManager/frmMain.frm')
    # Generated tests: Lookout and Delpdg samples, then the first batch of the
    # ArchReader project (its modules, classes and forms).
    def test40(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/Lookout/MainFrm.frm')
    def test41(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/Lookout/lookout.bas')
    def test42(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/Lookout/trayform.frm')
    def test43(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/Delpdg/Deletepdg.frm')
    def test44(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ArchReader/frmOptions.frm')
    def test45(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ArchReader/CDeclaration.bas')
    def test46(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ArchReader/CArchive.cls')
    def test47(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ArchReader/modZhReader.bas')
    def test48(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ArchReader/Mainfrm.frm')
    def test49(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ArchReader/modZhSReader.bas')
    # Generated tests: second batch of ArchReader samples.
    def test50(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ArchReader/CDocumentInfo.cls')
    def test51(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ArchReader/CStringPair.cls')
    def test52(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ArchReader/CURLInfo.cls')
    def test53(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ArchReader/modZhPReader.bas')
    def test54(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ArchReader/modShareFunction.bas')
    def test55(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ArchReader/Class1.cls')
    def test56(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ArchReader/frmServer.frm')
    def test57(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ArchReader/frmList.frm')
    def test58(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ArchReader/CProcesser.cls')
    def test59(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ArchReader/frmBookmark.frm')
    # Generated tests: ArchReader/Core interfaces and classes, then
    # FolderInstr and the first Packpdg sample.
    def test60(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ArchReader/Core/MDefinition.bas')
    def test61(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ArchReader/Core/CReader.cls')
    def test62(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ArchReader/Core/IDocumentHandler.cls')
    def test63(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ArchReader/Core/IReader.cls')
    def test64(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ArchReader/Core/IViewerHandler.cls')
    def test65(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ArchReader/Core/CDocument.cls')
    def test66(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ArchReader/Core/IReaderHandler.cls')
    def test67(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ArchReader/Core/IViewer.cls')
    def test68(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/FolderInstr/MainFrm.frm')
    def test69(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/Packpdg/packpdgResult.frm')
    # Generated tests: remaining Packpdg sample, sslibExplorer forms, and the
    # lContextMenu shell-extension samples.
    def test70(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/Packpdg/packpdg.frm')
    def test71(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/sslibExplorer/frmOptions.frm')
    def test72(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/sslibExplorer/frmBrowser.frm')
    def test73(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/sslibExplorer/MainV2.frm')
    def test74(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/sslibExplorer/frmTask3.frm')
    def test75(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/lContextMenu/Handler.cls')
    def test76(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/lContextMenu/mdlFilesFromIDO.bas')
    def test77(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/lContextMenu/mdlFunctions.bas')
    def test78(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/lContextMenu/IContextMenuCallback.cls')
    def test79(self):
        self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/lContextMenu/mdlContextMenu.bas')
def test80(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/toolKitIDLgen/ILString.cls')
def test81(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/toolKitIDLgen/CTest.cls')
def test82(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/toolKitIDLgen/MTest.bas')
def test83(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/toolKitIDLgen/IArchiveItem.cls')
def test84(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/toolKitIDLgen/CDummy.cls')
def test85(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/toolKitIDLgen/IUnzip.cls')
def test86(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/toolKitIDLgen/IArchiveItems.cls')
def test87(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/oevbext/imageGarden.cls')
def test88(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/SSDownload/MainFrm.frm')
def test89(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/SSDownload/CLiNInI.cls')
def test90(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/SSDownload/CiniText.cls')
def test91(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/SSDownload/CAutoSetting.cls')
def test92(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/SSDownload/gCString.Bas')
def test93(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/SSDownload/CSetting.cls')
def test94(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/GlobalWizard/GlobWiz.frm')
def test95(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/textPdgMerger/frmMain.frm')
def test96(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/FolderBrowser/FolderBrowser.cls')
def test97(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/FolderBrowser/Test.bas')
def test98(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/FolderBrowser/FolderBrowser.bas')
def test99(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/Netcat/frmMain.frm')
def test100(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/BlogText/BlogSave.frm')
def test101(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/BlogText/BlogWrite.frm')
def test102(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/chmMake/chmmake.frm')
def test103(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/chmMake/ChmMake.bas')
def test104(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/zhReader/Source/frmOptions.frm')
def test105(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/zhReader/Source/Copy of frmList.frm')
def test106(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/zhReader/Source/Copy (2) of Mainfrm.frm')
def test107(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/zhReader/Source/Copy of frmBookmark.frm')
def test108(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/zhReader/Source/modZhReader.bas')
def test109(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/zhReader/Source/Mainfrm.frm')
def test110(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/zhReader/Source/Copy of frmOptions.frm')
def test111(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/zhReader/Source/modZhSReader.bas')
def test112(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/zhReader/Source/Copy of frmServer.frm')
def test113(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/zhReader/Source/IReader.cls')
def test114(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/zhReader/Source/modZhPReader.bas')
def test115(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/zhReader/Source/modShareFunction.bas')
def test116(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/zhReader/Source/frmServer.frm')
def test117(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/zhReader/Source/frmList.frm')
def test118(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/zhReader/Source/frmBookmark.frm')
def test119(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/zhReader/Source/Copy of Mainfrm.frm')
def test120(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ssLibQuery/Dialog.frm')
def test121(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ssLibQuery/MainFrm2.frm')
def test122(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/LTest/modTest.bas')
def test123(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/Fileinstr/MainFrm.frm')
def test124(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/CreateFolderIndex/MMAIN.bas')
def test125(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/fgbookUrl/CbookUrl.cls')
def test126(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/fgbookUrl/MBookUrl.bas')
def test127(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/fgbookUrl/CHander.cls')
def test128(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/fgbookUrl/cmdFrm.frm')
def test129(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/lin-zip/modReadFunc.bas')
def test130(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/lin-zip/Mlin-zip.bas')
def test131(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/lin-zip/IInternetProtocolInfo.cls')
def test132(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/lin-zip/zipHandler.cls')
def test133(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/DevFormat/frmMain.frm')
def test134(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/Fac/CProduct.cls')
def test135(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/Fac/CJob.cls')
def test136(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/Fac/CWorks.cls')
def test137(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/Fac/CWorkFlow.cls')
def test138(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/Fac/CMoney.cls')
def test139(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/Fac/CWorkRecord.cls')
def test140(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/Fac/Main.frm')
def test141(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/Fac/CWorker.cls')
def test142(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/SyncDirectory/frmMain.frm')
def test143(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/SyncDirectory/CApp.cls')
def test144(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/Language.bas')
def test145(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/StringCollection.cls')
def test146(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/AppendString.cls')
def test147(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/Localizer.cls')
def test148(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/String.bas')
def test149(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/TestCStringMap.bas')
def test150(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/Rss.cls')
def test151(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/Dumper.bas')
def test152(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/Sort.Bas')
def test153(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/Bytes.bas')
def test154(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/Lpffile.bas')
def test155(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/HttpHeader.cls')
def test156(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/bookUrl.cls')
def test157(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/AssistComboBox.cls')
def test158(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/StringMap.cls')
def test159(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/IWinHttpClient.cls')
def test160(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/WinHttpDownloadEx.cls')
def test161(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/FolderBrowser.cls')
def test162(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/WinHttpSimple.cls')
def test163(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/LiNInI_Standalone.cls')
def test164(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/Random.Bas')
def test165(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/UTF8.cls')
def test166(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/Strings.Bas')
def test167(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/MDebug.bas')
def test168(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/Timer.bas')
def test169(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/Download.cls')
def test170(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/Unzip.bas')
def test171(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/MRU.cls')
def test172(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/LiNInI.cls')
def test173(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/App.cls')
def test174(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/WinHTTP.cls')
def test175(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/QuickWork.bas')
def test176(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/IDownloadClient.cls')
def test177(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/Layout.bas')
def test178(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/7zip.bas')
def test179(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/AppSetting.bas')
def test180(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/MFileSystem.bas')
def test181(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/MYFAV.cls')
def test182(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/DGSwsHTTP.cls')
def test183(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/Registry.cls')
def test184(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/LiNInI_ISetting.cls')
def test185(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/Forms.bas')
def test186(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/ommonDialogLite.cls')
def test187(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/MRUFileList.cls')
def test188(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/ICharStream.cls')
def test189(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/Parse.Bas')
def test190(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/Subclass.cls')
def test191(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/Map.cls')
def test192(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/MUtility.bas')
def test193(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/StdFonts.bas')
def test194(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/DownloadEx.cls')
def test195(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/omboboxHelper.bas')
def test196(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/FileEncrypt.bas')
def test197(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/PrivateProfile.bas')
def test198(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/iniText.cls')
def test199(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/MakeZhComment.cls')
def test200(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/WinHttpDownload.cls')
def test201(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/form.bas')
def test202(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/MenuArrHandle.cls')
def test203(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/Zlib.bas')
def test204(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/gCIni.Bas')
def test205(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/Windows.bas')
def test206(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/ApiHelper.bas')
def test207(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/Algorithms.bas')
def test208(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/IWinHttp.cls')
def test209(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/MZlib.bas')
def test210(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/TempFile.cls')
def test211(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/Utility.bas')
def test212(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/PathCollection.cls')
def test213(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/FolderBrowser.bas')
def test214(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/Main.bas')
def test215(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/IApp.cls')
def test216(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/AutoSetting.cls')
def test217(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/FileExecutor.bas')
def test218(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/gcHtmlWeb.Bas')
def test219(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/VBSetting.cls')
def test220(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/Object.cls')
def test221(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/App.bas')
def test222(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/DlgOpenDir.bas')
def test223(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/Timer.cls')
def test224(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/Unzip.cls')
def test225(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/String.cls')
def test226(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/FILEFUNC.BAS')
def test227(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/SSRLib.bas')
def test228(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/VBSource.bas')
def test229(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/ObjectHelper.bas')
def test230(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/dao.bas')
def test231(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/MParse.bas')
def test232(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/ReaderViaHtml.cls')
def test233(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/ISubclass.cls')
def test234(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/FileDialog.cls')
def test235(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/LightShell.bas')
def test236(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/ISetting.cls')
def test237(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/StringBuilder.cls')
# Has variable called Property
# def test238(self):
# self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/HtmlWeb.bas')
def test239(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/HttpHeader.bas')
def test240(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/CommonDialog.cls')
def test241(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/InterNetGet.cls')
def test242(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/Internet.bas')
def test243(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/ApiFileSystem.bas')
def test244(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/Ini.bas')
def test245(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/System.bas')
def test246(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/WinHttpSimple_OLD.cls')
def test247(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/lassicIO.bas')
def test248(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/gCString.Bas')
def test249(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/UseZipProtocol.bas')
def test250(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/Shell32.bas')
def test251(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/Debug.bas')
def test252(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/ApiTime.bas')
def test253(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/MFolderBrowser.bas')
def test254(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/DownloadProgress.cls')
def test255(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/HTMLProperty.cls')
def test256(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/HM.bas')
def test257(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/VBExe.bas')
def test258(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/HtmlDOM.bas')
def test259(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/Zlib.cls')
def test260(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/ByteString.cls')
def test261(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/Data Source.cls')
def test262(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/StrCollection.cls')
def test263(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/gCShell32.Bas')
def test264(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/ToPY.bas')
def test265(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/Music.bas')
def test266(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/StringChinese.cls')
def test267(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/StringAnalyser.cls')
def test268(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/FileStreamIn.cls')
def test269(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/StringLink.cls')
def test270(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/Registry.bas')
def test271(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/omplex Data Consumer.cls')
def test272(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/HTTPConnection.cls')
def test273(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/CommonDialogLite.cls')
def test274(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/File.bas')
def test275(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/Subclass.Bas')
def test276(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/WinInet.bas')
def test277(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/FileSystem.bas')
def test278(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/Arrays.bas')
def test279(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/Setting.cls')
def test280(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/RecentFile.cls')
def test281(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/StringBuilder(Copyed).cls')
def test282(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/Reg.cls')
def test283(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/infozip/InfoZipItem.cls')
def test284(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/infozip/InfozipShared.bas')
def test285(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/infozip/InfoUnzip.bas')
def test286(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/infozip/InfoZip.bas')
def test287(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/infozip/InfoUnZip.cls')
def test288(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/infozip/InfoZipItems.cls')
def test289(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/infozip/InfoZip.cls')
def test290(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/ssreader/BookURL.cls')
def test291(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/ssreader/BookTask.cls')
def test292(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/ssreader/TaskRuner.cls')
def test293(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/ssreader/BookTester.cls')
def test294(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/ssreader/BookInfo.cls')
def test295(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/ssreader/TaskDownloader_OLD.cls')
def test296(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/ssreader/SSReader.bas')
def test297(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/ssreader/frmMain.frm')
def test298(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/ssreader/TaskDownloader.cls')
def test299(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/ssreader/ITaskNotify.cls')
def test300(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/ssreader/JPGBookHandler.cls')
def test301(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/zbook/ZBookMake.bas')
def test302(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/zbook/ZBookTemplate.bas')
def test303(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/zbook/ZBookmark.cls')
def test304(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/zbook/ZBookComment.cls')
def test305(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/LocalModules/zbook/ZBook.bas')
def test306(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Forms/ProgressDialog.frm')
def test307(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Forms/InputBox.frm')
def test308(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GToPY.cls')
def test309(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GVBSource.cls')
def test310(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Language.bas')
def test311(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/StringCollection.cls')
def test312(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GFILEFUNC.cls')
def test313(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/AppendString.cls')
def test314(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Localizer.cls')
def test315(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GApiHelper.cls')
def test316(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/String.bas')
def test317(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GLpffile.cls')
def test318(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GForms.cls')
def test319(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GParse.cls')
def test320(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/TestCStringMap.bas')
def test321(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Rss.cls')
def test322(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Dumper.bas')
def test323(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Sort.Bas')
def test324(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Bytes.bas')
def test325(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Lpffile.bas')
def test326(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/HttpHeader.cls')
def test327(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/bookUrl.cls')
def test328(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GFile.cls')
def test329(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/AssistComboBox.cls')
def test330(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GInternet.cls')
def test331(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/StringMap.cls')
def test332(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/CFolderBrowser.cls')
def test333(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GAppSetting.cls')
def test334(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/IWinHttpClient.cls')
def test335(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/WinHttpDownloadEx.cls')
def test336(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GFolderBrowser.cls')
def test337(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/FolderBrowser.cls')
def test338(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GTestCStringMap.cls')
def test339(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/WinHttpSimple.cls')
def test340(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/LiNInI_Standalone.cls')
def test341(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Random.Bas')
def test342(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/UTF8.cls')
def test343(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Strings.Bas')
def test344(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/MDebug.bas')
def test345(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Timer.bas')
def test346(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Download.cls')
def test347(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Unzip.bas')
def test348(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/MRU.cls')
def test349(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/LiNInI.cls')
def test350(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GStrings.cls')
def test351(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/App.cls')
def test352(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GgCIni.cls')
def test353(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/WinHTTP.cls')
def test354(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/QuickWork.bas')
def test355(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/IDownloadClient.cls')
def test356(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Layout.bas')
def test357(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/AppSetting.bas')
def test358(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/MFileSystem.bas')
def test359(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GMZlib.cls')
def test360(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/MYFAV.cls')
def test361(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/DGSwsHTTP.cls')
def test362(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GLayout.cls')
def test363(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Registry.cls')
def test364(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GQuickWork.cls')
def test365(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/LiNInI_ISetting.cls')
def test366(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GRegistry.cls')
def test367(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Forms.bas')
def test368(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/ommonDialogLite.cls')
def test369(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GMDebug.cls')
def test370(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/MRUFileList.cls')
def test371(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GRandom.cls')
def test372(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/ICharStream.cls')
def test373(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GBytes.cls')
def test374(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GSubclass.cls')
def test375(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GObjectHelper.cls')
def test376(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Parse.Bas')
def test377(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GPrivateProfile.cls')
def test378(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Subclass.cls')
def test379(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Map.cls')
def test380(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/MUtility.bas')
def test381(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/StdFonts.bas')
def test382(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/DownloadEx.cls')
def test383(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/omboboxHelper.bas')
def test384(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GFileExecutor.cls')
def test385(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/FileEncrypt.bas')
def test386(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/PrivateProfile.bas')
def test387(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/iniText.cls')
def test388(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GMain.cls')
def test389(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/MakeZhComment.cls')
def test390(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/WinHttpDownload.cls')
def test391(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/form.bas')
def test392(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GWindows.cls')
def test393(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GApiFileSystem.cls')
def test394(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GlassicIO.cls')
def test395(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GAlgorithms.cls')
def test396(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GgCString.cls')
def test397(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GDumper.cls')
def test398(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GString.cls')
def test399(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Zlib.bas')
def test400(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/gCIni.Bas')
def test401(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Windows.bas')
def test402(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/ApiHelper.bas')
def test403(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Algorithms.bas')
def test404(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/IWinHttp.cls')
def test405(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GTimer.cls')
def test406(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GUnzip.cls')
def test407(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/MZlib.bas')
def test408(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/TempFile.cls')
def test409(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Shell.bas')
def test410(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Utility.bas')
def test411(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/PathCollection.cls')
def test412(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GDlgOpenDir.cls')
def test413(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GArrays.cls')
def test414(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GFileEncrypt.cls')
def test415(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/FolderBrowser.bas')
def test416(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GDebug.cls')
def test417(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Gform.cls')
def test418(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Main.bas')
def test419(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/IApp.cls')
def test420(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Gdao.cls')
def test421(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/AutoSetting.cls')
def test422(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/FileExecutor.bas')
def test423(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GZlib.cls')
def test424(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/gcHtmlWeb.Bas')
def test425(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GgCShell32.cls')
def test426(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/VBSetting.cls')
def test427(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Object.cls')
def test428(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GUtility.cls')
def test429(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/App.bas')
def test430(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/DlgOpenDir.bas')
def test431(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Timer.cls')
def test432(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Unzip.cls')
def test433(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/String.cls')
def test434(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/FILEFUNC.BAS')
def test435(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GLanguage.cls')
def test436(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GHtmlWeb.cls')
def test437(self):
pass#self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GApp.cls')
def test438(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/SSRLib.bas')
def test439(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/VBSource.bas')
def test440(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GMusic.cls')
def test441(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/ObjectHelper.bas')
def test442(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GHttpHeader.cls')
def test443(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GLightShell.cls')
def test444(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/dao.bas')
def test445(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/MParse.bas')
def test446(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/ReaderViaHtml.cls')
def test447(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/ISubclass.cls')
def test448(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/FileDialog.cls')
def test449(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/LightShell.bas')
def test450(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/ISetting.cls')
def test451(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/StringBuilder.cls')
def test452(self):
pass#self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/HtmlWeb.bas')
def test453(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/HttpHeader.bas')
def test454(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/CommonDialog.cls')
def test455(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/InterNetGet.cls')
def test456(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Internet.bas')
def test457(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GSystem.cls')
def test458(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/ApiFileSystem.bas')
def test459(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Ini.bas')
def test460(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/System.bas')
def test461(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/enuArrHandle.cls')
def test462(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GHtmlDOM.cls')
def test463(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/WinHttpSimple_OLD.cls')
def test464(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GShell.cls')
def test465(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GApiTime.cls')
def test466(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/lassicIO.bas')
def test467(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/gCString.Bas')
def test468(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/UseZipProtocol.bas')
def test469(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Shell32.bas')
def test470(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Debug.bas')
def test471(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/ApiTime.bas')
def test472(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/MFolderBrowser.bas')
def test473(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/DownloadProgress.cls')
def test474(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/HTMLProperty.cls')
def test475(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GShell32.cls')
def test476(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/HM.bas')
def test477(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GMParse.cls')
def test478(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GMFileSystem.cls')
def test479(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/HtmlDOM.bas')
def test480(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GSSRLib.cls')
def test481(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Zlib.cls')
def test482(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/ByteString.cls')
def test483(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Data Source.cls')
def test484(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/StrCollection.cls')
def test485(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GHM.cls')
def test486(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/gCShell32.Bas')
def test487(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/ToPY.bas')
def test488(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Music.bas')
def test489(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/StringChinese.cls')
def test490(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GMFolderBrowser.cls')
def test491(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/StringAnalyser.cls')
def test492(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/FileStreamIn.cls')
def test493(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GFileSystem.cls')
def test494(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/StringLink.cls')
def test495(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GWinInet.cls')
def test496(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Registry.bas')
def test497(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/omplex Data Consumer.cls')
def test498(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GgcHtmlWeb.cls')
def test499(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/HTTPConnection.cls')
def test500(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/CommonDialogLite.cls')
def test501(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/File.bas')
def test502(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GSort.cls')
def test503(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GomboboxHelper.cls')
def test504(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Subclass.Bas')
def test505(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/WinInet.bas')
def test506(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/FileSystem.bas')
def test507(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GIni.cls')
def test508(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GStdFonts.cls')
def test509(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Arrays.bas')
def test510(self):
pass#self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Setting.cls')
def test511(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/RecentFile.cls')
def test512(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GMUtility.cls')
def test513(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/StringBuilder(Copyed).cls')
def test514(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Reg.cls')
def test515(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/GUseZipProtocol.cls')
def test516(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Infozip/InfoZipItem.cls')
def test517(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Infozip/InfozipShared.bas')
def test518(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Infozip/InfoUnzip.bas')
def test519(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Infozip/InfoZip.bas')
def test520(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Infozip/GInfoUnzip.cls')
def test521(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Infozip/GInfoZip.cls')
def test522(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Infozip/GInfozipShared.cls')
def test523(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Infozip/InfoUnZip.cls')
def test524(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Infozip/InfoZipItems.cls')
def test525(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/Infozip/InfoZip.cls')
def test526(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/ssreader/BookInfo.cls')
def test527(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/Components/ssreader/gMSSReader.cls')
def test528(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/StackCol.cls')
def test529(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/WFormHlp.cls')
def test530(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/TIcon.Frm')
def test531(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/SortOld.bas')
def test532(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Browse.frm')
def test533(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/TExecute.frm')
def test534(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Meriwether.Frm')
def test535(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/ModGlobFilt.cls')
def test536(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/TEdge.frm')
def test537(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/TimeIt.bas')
def test538(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/EnumProc.bas')
def test539(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Cards.bas')
def test540(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/SieveCli.frm')
def test541(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/ModGlobDelFilt.cls')
def test542(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Sieve.bas')
def test543(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/StackVec.cls')
def test544(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/DrivesO.cls')
def test545(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/TPaths.Frm')
def test546(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/TReg.Frm')
def test547(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/VB6ToVB5Main.bas')
def test548(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/TCompletion.frm')
def test549(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/TPalette.frm')
def test550(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/CtlTool.bas')
def test551(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/AllAbout.frm')
def test552(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/ParseO.bas')
def test553(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/TRes.frm')
def test554(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/RegTlbOld.bas')
def test555(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/TimeIt.frm')
def test556(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/TestEnum.frm')
def test557(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/AppPath.bas')
def test558(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Thread.bas')
def test559(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/VBVerFilt.cls')
def test560(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/MenuList.cls')
# Auto-generated regression cases: each test feeds one VB6 source file from the
# HardcodeVB sample set through self._testFile (defined elsewhere in this suite).
def test561(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/WFileHlp.cls')
def test562(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/TCollect.frm')
def test563(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/FileData.cls')
def test564(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/TFolder.frm')
def test565(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Search.frm')
def test566(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Null.cls')
def test567(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/VB6ToVB5.frm')
def test568(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/TBezier.frm')
def test569(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/RegTlb.bas')
def test570(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Window.cls')
def test571(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/GlobModFilt.cls')
def test572(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/IStack.cls')
def test573(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/TWhiz.Frm')
def test574(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/TWindow.frm')
def test575(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Sieve.cls')
def test576(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/AddrOMatic.frm')
def test577(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/GlobWiz.frm')
def test578(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/BugFilt.cls')
def test579(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/TDictionary.frm')
def test580(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/TImage.frm')
def test581(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/SortRec.bas')
def test582(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/TParse.frm')
def test583(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/TSort.frm')
def test584(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/MenuItem.cls')
def test585(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/TSysMenu.bas')
def test586(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/TSplit.frm')
def test587(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/TShortcut.frm')
def test588(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/TString.frm')
def test589(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/TMessage.frm')
def test590(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/PubPrivFilt.cls')
def test591(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/BugWiz.frm')
def test592(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/TThread.frm')
def test593(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/ErrMsg.frm')
def test594(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/SListbox.cls')
def test595(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Fun.frm')
def test596(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/FileData.bas')
def test597(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/WinFind.cls')
def test598(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/TTimer.frm')
def test599(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Hardcore.frm')
def test600(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Problem.cls')
def test601(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/BitBlast.frm')
def test602(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/TSPLIT2.frm')
def test603(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/ResProc.bas')
def test604(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/GlobWizMain.bas')
def test605(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/TRes2.frm')
def test606(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/StackLst.cls')
def test607(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/TColorPick.frm')
def test608(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/CollWiz.frm')
def test609(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/TShare.frm')
def test610(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/BugWizMain.bas')
def test611(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/TSort.bas')
def test612(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/TSysMenu.frm')
def test613(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/PrivPubFilt.cls')
def test614(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Edwina.frm')
def test615(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Winwatch.frm')
# Auto-generated regression cases for the HardcodeVB/Controls sample files.
def test616(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Controls/Search.frm')
def test617(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Controls/ListItemWlk.cls')
def test618(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Controls/ListEnums.cls')
# Auto-generated regression cases for the HardcodeVB/Components sample files.
def test619(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/VectorDblWalker.cls')
def test620(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/UseFile.cls')
def test621(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/Instance.cls')
def test622(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/UseRegItems.cls')
def test623(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/Palette.cls')
def test624(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/Shared.cls')
def test625(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/Errors.cls')
def test626(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/Notify.bas')
def test627(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/VectorInt.cls')
def test628(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/VectorLngWalker.cls')
def test629(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/Objects.bas')
def test630(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/IAnimation.cls')
def test631(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/VectorSngWalker.cls')
def test632(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/Keyboard.cls')
def test633(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/IVarWlk.cls')
def test634(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/WinIter.cls')
def test635(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/Utility.cls')
def test636(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/Timer.bas')
def test637(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/ProcTool.cls')
def test638(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/VectorIntWalker.cls')
def test639(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/WinAny.cls')
def test640(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/Settings.cls')
def test641(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/IFileNotifier.cls')
def test642(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/VectorDbl.cls')
def test643(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/Vector.cls')
def test644(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/EnumVar.cls')
def test645(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/VectorBool.cls')
def test646(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/VectorLng.cls')
def test647(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/GDITool.cls')
def test648(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/WinTool.cls')
def test649(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/Allocator.cls')
def test650(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/RegNode.cls')
def test651(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/Subclass.cls')
def test652(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/DriveWlk.cls')
def test653(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/SortHelp.cls')
def test654(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/RegItemWlk.cls')
def test655(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/Short.cls')
def test656(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/Casts.cls')
def test657(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/RegItem.cls')
def test658(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/Drives.cls')
def test659(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/List.cls')
def test660(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/System.cls')
def test661(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/Shortcut.cls')
def test662(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/VectorWlk.cls')
def test663(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/ButterFly.cls')
def test664(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/VectorSng.cls')
def test665(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/Draw.cls')
def test666(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/ComDlg.cls')
def test667(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/Executive.cls')
def test668(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/Math.cls')
def test669(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/Random.cls')
def test670(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/RegNodeWlk.cls')
def test671(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/VectorStr.cls')
def test672(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/UseFolder.cls')
def test673(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/Module.cls')
def test674(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/Version.cls')
def test675(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/IWinHelp.cls')
def test676(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/ExeType.cls')
def test677(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/Timer.cls')
def test678(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/Drive.cls')
def test679(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/Notify.cls')
def test680(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/ISubclass.cls')
def test681(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/IFilter.cls')
def test682(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/SysMenu.cls')
def test683(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/ISortHelp.cls')
def test684(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/Sort.cls')
def test685(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/Bytes.cls')
def test686(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/Stack.cls')
def test687(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/Video.cls')
def test688(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/Shared.bas')
def test689(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/VectorStrWalker.cls')
def test690(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/Filter.cls')
def test691(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/TrayIcon.cls')
def test692(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/PicGlass.cls')
def test693(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/MinMax.cls')
def test694(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/About.cls')
def test695(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/FoldTool.cls')
def test696(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/ColorPicker.cls')
def test697(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/RegTool.cls')
def test698(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/OpenPic.cls')
def test699(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/FileTool.cls')
def test700(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/ListWalk.cls')
def test701(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/ShareStr.cls')
def test702(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/VectorBoolWalker.cls')
def test703(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/FileInfo.cls')
def test704(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/PicTool.cls')
def test705(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/ModTool.cls')
def test706(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/Process.cls')
def test707(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/Split.cls')
def test708(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/Subclass.bas')
def test709(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/ComCtl.cls')
def test710(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/PalTool.cls')
def test711(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Components/Parse.cls')
# Auto-generated regression cases for the HardcodeVB/Sieve sample files.
def test712(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Sieve/SieveBasExeP.cls')
def test713(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Sieve/SieveGlobalN.cls')
def test714(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Sieve/SieveBasExeN.frm')
def test715(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Sieve/SieveMainP.Bas')
def test716(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Sieve/SieveBasDllN.Cls')
def test717(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Sieve/SieveMainN.Bas')
def test718(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Sieve/SieveBasDllP.Cls')
def test719(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Sieve/SieveBasExeN.cls')
def test720(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Sieve/SieveBasExeP.frm')
def test721(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/Sieve/SieveGlobalP.cls')
# Auto-generated regression cases for the HardcodeVB/LocalModule sample files.
def test722(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_VectorInt.Cls')
def test723(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_VectorStrWalker.Cls')
def test724(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_Drive.cls')
def test725(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_Video.Cls')
def test726(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/Filter.Bas')
def test727(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_Stack.Cls')
def test728(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/Sort.Bas')
def test729(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/Bytes.bas')
def test730(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_Version.Cls')
def test731(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_UseRegItems.Cls')
def test732(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/Link.cls')
def test733(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_SysMenu.Cls')
def test734(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/About.frm')
def test735(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_IFilter.Cls')
def test736(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_Keyboard.cls')
def test737(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/Random.Bas')
def test738(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/Math.Bas')
def test739(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/ExeType.Bas')
def test740(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/OpenPic.frm')
def test741(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/ColorPicker.frm')
def test742(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/VB6Funcs.bas')
def test743(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_OpenPic.Cls')
def test744(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_System.Cls')
def test745(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/PicTool.bas')
def test746(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_Drives.cls')
def test747(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/ModTool.Bas')
def test748(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/BrowseBack.bas')
def test749(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_SortHelp.Cls')
def test750(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_VectorDbl.Cls')
def test751(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/PalTool.Bas')
def test752(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/Parse.bas')
def test753(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_DriveWlk.Cls')
def test754(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/ComCtl.Bas')
def test755(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/CtlTool.bas')
def test756(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_VectorLng.Cls')
def test757(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_Split.cls')
def test758(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_VectorSng.Cls')
def test759(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/FoldTool.Bas')
def test760(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_ButterFly.Cls')
def test761(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_About.cls')
def test762(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_VectorWlk.Cls')
def test763(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_RegNodeWlk.Cls')
def test764(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_Vector.cls')
def test765(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_List.Cls')
def test766(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/FileTool.Bas')
def test767(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_Process.cls')
def test768(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_Shortcut.Cls')
def test769(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/RegTool.Bas')
def test770(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/WinIter.Bas')
def test771(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/Utility.bas')
def test772(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_Palette.Cls')
def test773(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_VectorIntWalker.Cls')
def test774(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_IWinHelp.cls')
def test775(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_VectorStr.Cls')
def test776(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/Settings.Bas')
def test777(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_VectorLngWalker.Cls')
def test778(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_Executive.Cls')
def test779(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_UseFile.Cls')
def test780(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/WinAny.Bas')
def test781(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_VectorSngWalker.Cls')
def test782(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/ProcTool.Bas')
def test783(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_UseFolder.Cls')
def test784(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_ISortHelp.Cls')
def test785(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_Module.cls')
def test786(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_VectorBool.Cls')
def test787(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/Notify.Cls')
def test788(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_VectorDblWalker.Cls')
def test789(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/LightShell.bas')
def test790(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/Errors.bas')
def test791(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_RegItemWlk.Cls')
def test792(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_IVarWlk.Cls')
def test793(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_RegNode.Cls')
def test794(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_IAnimation.Cls')
def test795(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/Casts.Bas')
def test796(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/Short.Bas')
def test797(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_PicGlass.Cls')
def test798(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/Debug.bas')
def test799(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_TrayIcon.Cls')
def test800(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/Gobals.bas')
def test801(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/ComDlg.bas')
def test802(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_ListWalk.Cls')
def test803(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_ShareStr.Cls')
def test804(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_FileInfo.Cls')
def test805(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/Draw.Bas')
def test806(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/Declare.bas')
def test807(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_EnumVar.Cls')
def test808(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_MinMax.Cls')
def test809(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_VectorBoolWalker.Cls')
def test810(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/EnumVar.bas')
def test811(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_RegItem.Cls')
def test812(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/Subclass.Bas')
def test813(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/P_ColorPicker.Cls')
def test814(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/WinTool.Bas')
def test815(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/Allocator.Bas')
def test816(self): self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/HardcodeVB/LocalModule/GDITool.Bas')
def test817(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/TMap.cls')
def test818(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/IntegerMap.cls')
def test819(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/KeysetsStrInt.cls')
def test820(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/TStack.cls')
def test821(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/StringMap.cls')
def test822(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/Link.cls')
def test823(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/ObjectStack.cls')
def test824(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/StringVector.cls')
def test825(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/LongMap.cls')
def test826(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/ObjectMap.cls')
def test827(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/StringArray.cls')
def test828(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/Array.cls')
def test829(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/TDictionary.cls')
def test830(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/Vector.cls')
def test831(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/TKeysets.cls')
def test832(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/Map.cls')
def test833(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/IndexTable.cls')
def test834(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/CHashTable.cls')
def test835(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/ByteArray.cls')
def test836(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/BooleanMap.cls')
def test837(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/ObjectLink.cls')
def test838(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/LongLink.cls')
def test839(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/LongStack.cls')
def test840(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/HashTable.cls')
def test841(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/LongVector.cls')
def test842(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/LongArray.cls')
def test843(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/IntegerLink.cls')
def test844(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/IntegerArray.cls')
def test845(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/BooleanArray.cls')
def test846(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/TVector.cls')
def test847(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/Stack.cls')
def test848(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/TLink.cls')
def test849(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/StringStack.cls')
def test850(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/ObjectArray.cls')
def test851(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/UniqueStringVector.cls')
def test852(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/ObjectVector.cls')
def test853(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/TArray.cls')
def test854(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/StringLink.cls')
def test855(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/THashTable.cls')
def test856(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/THash.cls')
def test857(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/ByteMap.cls')
def test858(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/Good/CStringArray.cls')
def test859(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/Good/cHashTable.cls')
def test860(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/Good/cStringVentor.cls')
def test861(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/Good/CArray.cls')
def test862(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/Good/TArray.cls')
def test863(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/Good/THashTable.cls')
def test864(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Include]/ClassTemplate/Good/THash.cls')
def test865(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/LExplorer/frmOptions.frm')
def test866(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/LExplorer/Mainfrm.frm')
def test867(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/LExplorer/modLExplorer.bas')
def test868(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/LExplorer/frmBookmark.frm')
def test869(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/zipProtocol/modReadFunc.bas')
def test870(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/zipProtocol/IInternetProtocolInfo.cls')
def test871(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/zipProtocol/zipHandler.cls')
def test872(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/TestVSS/Form1.frm')
def test873(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/TestVSS/Algorithms.bas')
def test874(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/XUnpack/XUNPack.frm')
def test875(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/XUnpack/XUNPack.bas')
def test876(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/IeSaveText/MainFrm.frm')
def test877(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/IeSaveText/IeSaveText.cls')
def test878(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/WenXin/frmMain.frm')
def test879(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/HtmlParser/Module1.bas')
def test880(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/HtmlParser/MTEST.bas')
def test881(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Samples]/vbhash/Demo1/CStopWatch.cls')
def test882(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Samples]/vbhash/Demo1/Test.bas')
def test883(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Samples]/vbhash/Demo1/Collect.cls')
def test884(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Samples]/vbhash/Demo1/OHash.cls')
def test885(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Samples]/vbhash/Demo1/Demo.frm')
def test886(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Samples]/vbhash/Demo1/Node.cls')
def test887(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Samples]/vbhash/Demo1/OHash.bas')
def test888(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Samples]/vbhash/Demo1/AHash.cls')
def test889(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/QuickWork/Module1.bas')
def test890(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/QuickWork/Inet.cls')
def test891(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/QuickWork/Form.frm')
def test892(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/QuickWork/MFileSystem.bas')
def test893(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/QuickWork/mainQuickWork.bas')
def test894(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ExecLine/frmMain.frm')
def test895(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/TimeCMD/TimeCMD.frm')
def test896(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ProgramLoader/frmMain.frm')
def test897(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/zpShower/Source/frmOptions.frm')
def test898(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/zpShower/Source/Mainfrm.frm')
def test899(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/zpShower/Source/modShareFunction.bas')
def test900(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/zpShower/Source/Main.bas')
def test901(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/zpShower/Source/frmServer.frm')
def test902(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/zpShower/Source/frmList.frm')
def test903(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/zpShower/Source/frmBookmark.frm')
def test904(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ssMdbQuery/Dialog.frm')
def test905(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ssMdbQuery/Mainfrm.bas')
def test906(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ssMdbQuery/MainFrm2.frm')
def test907(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/VBSourceWizard/GlobWiz.frm')
def test908(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/NextPage/NextPage.bas')
def test909(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/NextPage/JetCar.cls')
def test910(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/NextPage/frmSetting.frm')
def test911(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/NextPage/frmProgress.frm')
def test912(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/GetssLib/Forms/Options.frm')
def test913(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/GetssLib/Forms/Task.frm')
def test914(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/GetssLib/Forms/XTask.frm')
def test915(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/GetssLib/Forms/BatchTasksAdd.frm')
def test916(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/GetssLib/Forms/Task(TaskMan).frm')
def test917(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/GetssLib/Forms/Main.frm')
def test918(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/GetssLib/SSLibTaskman/frmOptions.frm')
def test919(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/GetssLib/SSLibTaskman/frmProgress.frm')
def test920(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/GetssLib/SSLibTaskman/Task.frm')
def test921(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/GetssLib/SSLibTaskman/frmTask2.frm')
def test922(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/GetssLib/SSLibTaskman/MainV2.frm')
def test923(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/GetssLib/SSLibTaskman/Class1.cls')
def test924(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/GetssLib/SSLibTaskman/MainV3.frm')
def test925(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/GetssLib/SSLibTaskman/frmTask3.frm')
def test926(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/GetssLib/SSLibTaskman/Main.frm')
def test927(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/GetssLib/SSLibTaskman/CApp.cls')
def test928(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/GetssLib/SSLibTaskman/CTask.cls')
def test929(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/GetssLib/SSLibTaskman/ITaskNotify.cls')
def test930(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/GetssLib/SSLibTaskman/sslib.bas')
def test931(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/GetssLib/SSLibTaskman/Context/Handler.cls')
def test932(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/GetssLib/SSLibTaskman/Context/frmContextInfo.frm')
def test933(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/GetssLib/Modules/MTaskman.bas')
def test934(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/InvisibleRun/MMain.bas')
def test935(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/zipProtocolBeta/modReadFunc.bas')
def test936(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/zipProtocolBeta/IInternetProtocolInfo.cls')
def test937(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/zipProtocolBeta/zipHandler.cls')
def test938(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/MyMusic/mainfrm.frm')
def test939(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/MyMusic/rebuilddir.bas')
def test940(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/Santa/frmMain.frm')
def test941(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/dllFolder_Browser/mBrowseForFolder.bas')
def test942(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/dllFolder_Browser/mDebugMsg.bas')
def test943(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/dllFolder_Browser/cCaptureBF.cls')
def test944(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/dllFolder_Browser/ICaptureBF.cls')
def test945(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/dllFolder_Browser/cBrowseForFolder.cls')
def test946(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/pdgzf/pdgZF.frm')
def test947(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/rssbho/forIE.cls')
def test948(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/rssbho/RSSBHO.cls')
def test949(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/iebho/CToolKit.cls')
def test950(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/iebho/cHandleForIElOG.cls')
def test951(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/iebho/cHanderForMimima.cls')
def test952(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/bookUrl/CbookUrl.cls')
def test953(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/bookUrl/MBookUrl.bas')
def test954(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/bookUrl/CHander.cls')
def test955(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/bookUrl/cmdFrm.frm')
def test956(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/MakeZhC/MakeZhc.bas')
def test957(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/MakeZhC/frmZHC.frm')
def test958(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/91reg/main.frm')
def test959(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/class/MYFAV.cls')
def test960(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/class/ZTM.cls')
def test961(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/class/Crss.cls')
def test962(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/pdgLibQuery/MainFrm.frm')
def test963(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/pdgLibQuery/Dialog.frm')
def test964(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/pdgLibQuery/MainFrm2.frm')
def test965(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/IncludeAll/ModuleMain.bas')
def test966(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/FirefoxPortableLoader/Form1.frm')
def test967(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/FirefoxPortableLoader/MMain.bas')
def test968(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/bas/ZTMReader.bas')
def test969(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/bas/string.bas')
def test970(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/bas/lpffile.bas')
def test971(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/bas/ZtmMake.bas')
def test972(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/bas/HTMLFILE.bas')
def test973(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/bas/kerak.bas')
def test974(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/bas/PDGVIEW.bas')
def test975(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/bas/PDG.bas')
def test976(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/bas/Common.bas')
def test977(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/bas/ZTMReaderbeta.bas')
def test978(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/bas/modLReader.bas')
def test979(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/bas/PDFlin.bas')
def test980(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/bas/app.bas')
def test981(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/bas/ZIPReaderPUBLIC.bas')
def test982(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/bas/PDGPRO.bas')
def test983(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/bas/system.bas')
def test984(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/bas/ChmMake.bas')
def test985(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/bas/kerak2.bas')
def test986(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/bas/file.bas')
def test987(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/LiNVBLib/Module1.bas')
def test988(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/LiNVBLib/MTEST.bas')
def test989(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/LiNVBLib/MMain.bas')
def test990(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/LiNVBLib/frmMain.frm')
def test991(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/LiNVBLib/Class1.cls')
def test992(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/LiNVBLib/CApp.cls')
def test993(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/FormatBlog/frmMain.frm')
def test994(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/xrzUnpackDir/frmMain.frm')
def test995(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Library]/ssreader/CBookInfo.cls')
def test996(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/[Library]/ssreader/MSSReader.bas')
def test997(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/cleaner/mainfrm.frm')
def test998(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/FileStrReplace/MainFrm.frm')
def test999(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/lUseZip/gCUtility.cls')
def test1000(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/lUseZip/UnzipConstants.cls')
def test1001(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/lUseZip/mZip.bas')
def test1002(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/lUseZip/CZitItems.cls')
def test1003(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/lUseZip/modShareFunction.bas')
def test1004(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/lUseZip/CZipItem.cls')
def test1005(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/lUseZip/cUnZip.cls')
def test1006(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/lUseZip/CPathNameCollection.cls')
def test1007(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/lUseZip/mUnzip.bas')
def test1008(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/lUseZip/cZip.cls')
def test1009(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/lUseZip/MTestZip.bas')
def test1010(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ArtCorridor/Source/frmOptions.frm')
def test1011(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ArtCorridor/Source/Mainfrm.frm')
def test1012(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ArtCorridor/Source/modShareFunction.bas')
def test1013(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ArtCorridor/Source/Main.bas')
def test1014(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ArtCorridor/Source/frmList.frm')
def test1015(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ArtCorridor/Source/frmBookmark.frm')
def test1016(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/PackChm/frmOptions.frm')
def test1017(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/PackChm/frmAbout.frm')
def test1018(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/PackChm/frmMain.frm')
def test1019(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/TemplateHelper.bas')
def test1020(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/Test.bas')
def test1021(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/TemplateType.bas')
def test1022(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/ClassBuilder.frm')
def test1023(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/ClassTemplate.bas')
def test1024(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/TypeInfo.bas')
def test1025(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/TemplateBuilder.cls')
def test1026(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/TemplateType.cls')
def test1027(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/Filters/FilterConstVar.cls')
def test1028(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/Filters/FilterTypeName.cls')
def test1029(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/Filters/FilterModule.cls')
def test1030(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/Filters/FilterTypeOP.cls')
def test1031(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/TemplateType/TPLYType.cls')
def test1032(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/TemplateType/TPLLType.cls')
def test1033(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/TemplateType/TPLEType.cls')
def test1034(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/TemplateType/TPLPType.cls')
def test1035(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/TemplateType/TPLDType.cls')
def test1036(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/TemplateType/TPLQType.cls')
def test1037(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/TemplateType/TPLXType.cls')
def test1038(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/TemplateType/TPLMType.cls')
def test1039(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/TemplateType/TPLSType.cls')
def test1040(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/TemplateType/TPLFType.cls')
def test1041(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/TemplateType/TPLOType.cls')
def test1042(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/TemplateType/TPLZType.cls')
def test1043(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/TemplateType/TPLNType.cls')
def test1044(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/TemplateType/TPLRType.cls')
def test1045(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/TemplateType/TPLGType.cls')
def test1046(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/TemplateType/TPLWType.cls')
def test1047(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/TemplateType/TPLBType.cls')
def test1048(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/TemplateType/TPLKType.cls')
def test1049(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/TemplateType/TPLJType.cls')
def test1050(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/TemplateType/TPLVType.cls')
def test1051(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/TemplateType/TPLCType.cls')
def test1052(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/TemplateType/TPLHType.cls')
def test1053(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/TemplateType/TPLAType.cls')
def test1054(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/TemplateType/TPLTType.cls')
def test1055(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/TemplateType/TPLUType.cls')
def test1056(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/TemplateType/TPLIType.cls')
def test1057(self):
self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/ClassBuilder/frmAbout.frm')
# Auto-generated smoke tests: each case feeds one VB6 source file from the
# local "badcodes" corpus through self._testFile (the conversion must not
# raise). NOTE(review): paths are absolute to one developer's machine, so
# these tests can only pass in that environment.
def test1058(self):
    self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/ClassBuilder/FilterHelper.bas')
def test1059(self):
    self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/ClassBuilder/App.cls')
def test1060(self):
    self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/ClassBuilder/Type.cls')
def test1061(self):
    self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/ClassBuilder/Parse.bas')
def test1062(self):
    self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/ClassBuilder/TypeStyle.cls')
def test1063(self):
    self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/ClassBuilder/TextBoxReportor.cls')
def test1064(self):
    self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/ClassBuilder/ITemplateFilter.cls')
def test1065(self):
    self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/ClassBuilder/IFilterReporter.cls')
def test1066(self):
    self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/ClassBuilder/Main.bas')
def test1067(self):
    self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/ClassBuilder/Settings.bas')
def test1068(self):
    self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/ClassBuilder/frmMain.frm')
def test1069(self):
    self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/ClassBuilder/Class1.cls')
def test1070(self):
    self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/ClassBuilder/dlgEditList.frm')
def test1071(self):
    self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ClassTemplate/ClassBuilder/ITypeInfo.cls')
def test1072(self):
    self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/BinTree/cHashTable.cls')
def test1073(self):
    self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/BinTree/cStringVentor.cls')
def test1074(self):
    self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/BinTree/btString.cls')
def test1075(self):
    self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/txtReader/frmOptions.frm')
def test1076(self):
    self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/txtReader/Mainfrm.frm')
def test1077(self):
    self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/txtReader/modtxtReader.bas')
def test1078(self):
    self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/txtReader/frmBookmark.frm')
def test1079(self):
    self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/practiceCOM/contextText.cls')
def test1080(self):
    self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ssLibBase/Dialog.frm')
def test1081(self):
    self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ssLibBase/MainFrm2.frm')
def test1082(self):
    self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/ssLibBase/MSSReader.bas')
def test1083(self):
    self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/FolderPacker/frmMain.frm')
def test1084(self):
    self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/LoadWow/Form1.frm')
def test1085(self):
    self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/LoadWow/Main.bas')
def test1086(self):
    self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/lBlueSky/frmOptions.frm')
def test1087(self):
    self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/lBlueSky/modHttpServer.bas')
def test1088(self):
    self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/lBlueSky/frmServer.frm')
def test1089(self):
    self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/Rename pdg folders/Rename PDG folders.frm')
def test1090(self):
    self._testFile('/Users/paul/Workspace/sandbox/vb2py-git-files/badcodes/p/frmP.frm')
# Allow running this generated test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
| 43.647793
| 130
| 0.78291
| 18,728
| 143,383
| 5.931546
| 0.100545
| 0.117855
| 0.166961
| 0.206246
| 0.79199
| 0.791558
| 0.791558
| 0.791558
| 0.791558
| 0.791558
| 0
| 0.032439
| 0.053779
| 143,383
| 3,284
| 131
| 43.661084
| 0.786345
| 0.003264
| 0
| 0.001373
| 0
| 0.497483
| 0.679015
| 0.676104
| 0
| 0
| 0
| 0
| 0
| 1
| 0.498856
| false
| 0.001373
| 0.000915
| 0
| 0.500229
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 11
|
6b42bcf91a44693809233d5779f906fa852a52bd
| 17,609
|
py
|
Python
|
indicators/tests/test_indicator_create_update.py
|
Falliatcom-sa/falliatcom
|
39fb926de072c296ed32d50cccfb8003ca870739
|
[
"Apache-2.0"
] | null | null | null |
indicators/tests/test_indicator_create_update.py
|
Falliatcom-sa/falliatcom
|
39fb926de072c296ed32d50cccfb8003ca870739
|
[
"Apache-2.0"
] | 5
|
2021-02-08T20:42:48.000Z
|
2022-03-12T00:19:38.000Z
|
indicators/tests/test_indicator_create_update.py
|
Falliatcom-sa/falliatcom
|
39fb926de072c296ed32d50cccfb8003ca870739
|
[
"Apache-2.0"
] | null | null | null |
import datetime
import json
import uuid
from django.test import RequestFactory, TestCase
from django.urls import reverse_lazy
from factories import ResultFactory
from indicators.models import Indicator, PeriodicTarget
from indicators.views.views_indicators import PeriodicTargetJsonValidationError
from tola.test.base_classes import TestBase
class TestIndcatorCreateUpdateBase(TestBase):
    """Shared fixture for indicator create/update tests.

    Pins the program reporting period to a fixed three-year window and
    provides a baseline POST payload builder for the indicator form.
    """

    def setUp(self):
        super(TestIndcatorCreateUpdateBase, self).setUp()
        # Fix the reporting period so periodic-target date validation in the
        # views is deterministic across test runs.
        self.program.reporting_period_start = datetime.date(2018, 1, 1)
        self.program.reporting_period_end = datetime.date(2020, 12, 31)
        self.program.save()

    def _base_indicator_post_data(self, target_frequency, periodic_targets):
        """Build form POST data for an indicator with the given target
        frequency; `periodic_targets` is serialized to a JSON string as the
        front end would submit it."""
        payload = {
            'name': 'Test Indicator',
            'program_id': self.program.id,
            'target_frequency': target_frequency,
            'level': self.level.id,
            'indicator_type': 1,
            'unit_of_measure_type': 1,
            'unit_of_measure': 1,
            'lop_target': 3223,
            'direction_of_change': Indicator.DIRECTION_OF_CHANGE_NONE,
            'rationale': 'foo',
            'indicator_key': uuid.uuid4(),
        }
        payload['periodic_targets'] = json.dumps(periodic_targets)
        return payload
class IndicatorCreateTests(TestIndcatorCreateUpdateBase, TestCase):
    """
    Test the create indicator form api paths works, and PTs are created
    """

    def setUp(self):
        super(IndicatorCreateTests, self).setUp()
        self.indicator.delete()  # scrap this since we are making new indicators

    def test_get(self):
        # The create form renders in the modal template.
        url = reverse_lazy('indicator_create', args=[self.program.id])
        response = self.client.get(url)
        self.assertTemplateUsed(response, 'indicators/indicator_form_modal.html')

    def test_lop_creation(self):
        """Creating a LOP indicator auto-creates one LOP periodic target."""
        periodic_targets = []
        data = self._base_indicator_post_data(Indicator.LOP, periodic_targets)
        # Sanity: database starts empty.
        self.assertEqual(Indicator.objects.count(), 0)
        self.assertEqual(PeriodicTarget.objects.count(), 0)
        url = reverse_lazy('indicator_create', args=[self.program.id])
        response = self.client.post(url, data)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(Indicator.objects.count(), 1)
        self.assertEqual(PeriodicTarget.objects.count(), 1)
        indicator = Indicator.objects.get()
        pt = PeriodicTarget.objects.get()
        self.assertEqual(pt.indicator, indicator)
        self.assertEqual(pt.period_name, PeriodicTarget.LOP_PERIOD)
        # The dummy LOP target mirrors the indicator's lop_target value.
        self.assertEqual(pt.target, indicator.lop_target)

    def test_annual_creation(self):
        """Creating an ANNUAL indicator creates one PT per submitted period."""
        periodic_targets = [
            {"id": 0, "period": "Year 1", "target": "1", "start_date": "Jan 1, 2018", "end_date": "Dec 31, 2018"},
            {"id": 0, "period": "Year 2", "target": "2", "start_date": "Jan 1, 2019", "end_date": "Dec 31, 2019"},
            {"id": 0, "period": "Year 3", "target": "3", "start_date": "Jan 1, 2020", "end_date": "Dec 31, 2020"}]
        data = self._base_indicator_post_data(Indicator.ANNUAL, periodic_targets)
        self.assertEqual(Indicator.objects.count(), 0)
        self.assertEqual(PeriodicTarget.objects.count(), 0)
        url = reverse_lazy('indicator_create', args=[self.program.id])
        response = self.client.post(url, data)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(Indicator.objects.count(), 1)
        self.assertEqual(PeriodicTarget.objects.count(), 3)
        indicator = Indicator.objects.get()
        # Spot-check the chronologically first target.
        pt = PeriodicTarget.objects.order_by('start_date').first()
        self.assertEqual(pt.indicator, indicator)
        self.assertEqual(pt.period_name, 'Year 1')
        self.assertEqual(pt.target, 1)

    def test_events_creation(self):
        """EVENT targets need no dates; missing keys are tolerated."""
        periodic_targets = [{"id": 0, "period": "a", "target": "1", "start_date": "", "end_date": ""},
                            {"id": 0, "period": "b", "target": "2"}]
        data = self._base_indicator_post_data(Indicator.EVENT, periodic_targets)
        self.assertEqual(Indicator.objects.count(), 0)
        self.assertEqual(PeriodicTarget.objects.count(), 0)
        url = reverse_lazy('indicator_create', args=[self.program.id])
        response = self.client.post(url, data)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(Indicator.objects.count(), 1)
        self.assertEqual(PeriodicTarget.objects.count(), 2)
        indicator = Indicator.objects.get()
        # Event targets are ordered by customsort, not dates.
        pt = PeriodicTarget.objects.order_by('customsort').first()
        self.assertEqual(pt.indicator, indicator)
        self.assertEqual(pt.period_name, 'a')
        self.assertEqual(pt.target, 1)

    def test_annual_creation_invalid_json(self):
        """What if client sends in bad periodic_targets JSON?"""
        periodic_targets = [
            {"id": 0, "period": "Year 1", "target": "1", "start_date": "Jan 1, 2017", "end_date": "Dec 31, 2017"},  # wrong dates
            {"id": 0, "period": "Year 2", "target": "2", "start_date": "Jan 1, 2019", "end_date": "Dec 31, 2019"},
            {"id": 0, "period": "Year 3", "target": "3", "start_date": "Jan 1, 2020", "end_date": "Dec 31, 2020"}]
        data = self._base_indicator_post_data(Indicator.ANNUAL, periodic_targets)
        url = reverse_lazy('indicator_create', args=[self.program.id])
        with self.assertRaises(PeriodicTargetJsonValidationError):
            self.client.post(url, data)
        periodic_targets = [
            {"id": 0, "period": "Year 1", "target": "1", "start_date": "Jan 1, 2017", "end_date": "Dec 31, 2017"},  # too few pts
        ]
        data = self._base_indicator_post_data(Indicator.ANNUAL, periodic_targets)
        with self.assertRaises(PeriodicTargetJsonValidationError):
            self.client.post(url, data)
        periodic_targets = [
            {"id": 0, "period": "Year 1", "target": "-1", "start_date": "Jan 1, 2017", "end_date": "Dec 31, 2017"},
            # negative value
        ]
        data = self._base_indicator_post_data(Indicator.ANNUAL, periodic_targets)
        with self.assertRaises(PeriodicTargetJsonValidationError):
            self.client.post(url, data)
class IndicatorUpdateTests(TestIndcatorCreateUpdateBase, TestCase):
    """
    Test the update form API works, PTs are created, and that results are reassigned
    """

    def setUp(self):
        super(IndicatorUpdateTests, self).setUp()
        # A result with no periodic target attached; the update views should
        # reassign it to a newly created PT.
        self.result = ResultFactory(
            periodic_target=None,
            indicator=self.indicator,
            program=self.program,
            achieved=1024,
            date_collected='2018-06-01'
        )

    def test_get(self):
        url = reverse_lazy('indicator_update', args=[self.indicator.id])
        response = self.client.get(url)
        # self.assertContains(response, 'Indicator Performance Tracking Table')
        self.assertTemplateUsed(response, 'indicators/indicator_form_modal.html')

    def test_lop_update(self):
        """Switching to LOP creates the dummy PT and reassigns the result."""
        data = self._base_indicator_post_data(Indicator.LOP, [])
        self.assertEqual(PeriodicTarget.objects.count(), 0)
        url = reverse_lazy('indicator_update', args=[self.indicator.id])
        response = self.client.post(url, data)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(PeriodicTarget.objects.count(), 1)
        self.result.refresh_from_db()
        self.assertEqual(self.result.periodic_target, PeriodicTarget.objects.get())
        # Does updating a second time update the dummy PT?
        data['lop_target'] = 1024
        url = reverse_lazy('indicator_update', args=[self.indicator.id])
        response = self.client.post(url, data)
        indicator = Indicator.objects.get()
        pt = PeriodicTarget.objects.get()
        self.assertEqual(pt.indicator, indicator)
        self.assertEqual(pt.period_name, PeriodicTarget.LOP_PERIOD)
        # The dummy PT tracks the new lop_target value.
        self.assertEqual(pt.target, indicator.lop_target)

    def test_annual_update(self):
        """Switching to ANNUAL creates the PTs and reassigns the result to the
        PT covering its date_collected (mid-2018 -> the Year 1 target)."""
        periodic_targets = [
            {"id": 0, "period": "Year 1", "target": "1", "start_date": "Jan 1, 2018", "end_date": "Dec 31, 2018"},
            {"id": 0, "period": "Year 2", "target": "2", "start_date": "Jan 1, 2019", "end_date": "Dec 31, 2019"},
            {"id": 0, "period": "Year 3", "target": "3", "start_date": "Jan 1, 2020", "end_date": "Dec 31, 2020"}]
        data = self._base_indicator_post_data(Indicator.ANNUAL, periodic_targets)
        self.assertEqual(PeriodicTarget.objects.count(), 0)
        url = reverse_lazy('indicator_update', args=[self.indicator.id])
        response = self.client.post(url, data)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(PeriodicTarget.objects.count(), 3)
        self.result.refresh_from_db()
        self.assertEqual(self.result.periodic_target, PeriodicTarget.objects.order_by('start_date').first())

    def test_events_update(self):
        # update with 2 events
        periodic_targets = [{"id": 0, "period": "a", "target": "1", "start_date": "", "end_date": ""},
                            {"id": 0, "period": "b", "target": "2"}]
        data = self._base_indicator_post_data(Indicator.EVENT, periodic_targets)
        self.assertEqual(PeriodicTarget.objects.count(), 0)
        url = reverse_lazy('indicator_update', args=[self.indicator.id])
        response = self.client.post(url, data)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(PeriodicTarget.objects.count(), 2)
        pt = PeriodicTarget.objects.order_by('customsort').first()
        pt2 = PeriodicTarget.objects.order_by('customsort').last()
        self.assertEqual(pt.period_name, 'a')
        self.assertEqual(pt.target, 1)
        # update again with only 1 event
        # NOTE(review): this posts the existing PT ids, so the first target is
        # edited in place (name/target change, count stays at 2).
        periodic_targets = [{"id": pt.id, "period": "aaa", "target": "111", "start_date": "", "end_date": ""},
                            {"id": pt2.id, "period": "b", "target": "2"}]
        data = self._base_indicator_post_data(Indicator.EVENT, periodic_targets)
        self.assertEqual(PeriodicTarget.objects.count(), 2)
        url = reverse_lazy('indicator_update', args=[self.indicator.id])
        response = self.client.post(url, data)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(PeriodicTarget.objects.count(), 2)
        pt = PeriodicTarget.objects.order_by('customsort').first()
        self.assertEqual(pt.period_name, 'aaa')
        self.assertEqual(pt.target, 111)

    def test_annual_update_invalid_json(self):
        """What if client sends in bad periodic_targets JSON?"""
        periodic_targets = [
            {"id": 0, "period": "Year 1", "target": "1", "start_date": "Jan 1, 2017", "end_date": "Dec 31, 2017"},  # wrong dates
            {"id": 0, "period": "Year 2", "target": "2", "start_date": "Jan 1, 2019", "end_date": "Dec 31, 2019"},
            {"id": 0, "period": "Year 3", "target": "3", "start_date": "Jan 1, 2020", "end_date": "Dec 31, 2020"}]
        data = self._base_indicator_post_data(Indicator.ANNUAL, periodic_targets)
        url = reverse_lazy('indicator_update', args=[self.indicator.id])
        with self.assertRaises(PeriodicTargetJsonValidationError):
            self.client.post(url, data)
        periodic_targets = [
            {"id": 0, "period": "Year 1", "target": "1", "start_date": "Jan 1, 2017", "end_date": "Dec 31, 2017"},  # too few pts
        ]
        data = self._base_indicator_post_data(Indicator.ANNUAL, periodic_targets)
        with self.assertRaises(PeriodicTargetJsonValidationError):
            self.client.post(url, data)
        periodic_targets = [
            {"id": 0, "period": "Year 1", "target": "-1", "start_date": "Jan 1, 2017", "end_date": "Dec 31, 2017"},  # negative value
        ]
        data = self._base_indicator_post_data(Indicator.ANNUAL, periodic_targets)
        with self.assertRaises(PeriodicTargetJsonValidationError):
            self.client.post(url, data)
class PeriodicTargetsFormTests(TestBase, TestCase):
    """Smoke test for the periodic-targets sub-form endpoint."""

    # NOTE: the original version had an empty setUp that only called super()
    # and built an unused RequestFactory instance; both were dead code
    # (self.client performs the actual request) and have been removed.

    def test_post(self):
        """POSTing indicator form data returns the PT form fragment (HTTP 200)."""
        # build form data using URL encoded form key value pairs
        data = {
            'name': 'Test+Name',
            'program2': self.program.id,
            'target_frequency': Indicator.ANNUAL,
            'level': 1,
            'indicator_type': 1,
            'unit_of_measure_type': 1,
            'unit_of_measure': 1,
            'lop_target': 3223,
            'program': self.program.id,
            'direction_of_change': Indicator.DIRECTION_OF_CHANGE_NONE,
        }
        url = reverse_lazy('periodic_targets_form', args=[self.program.id])
        response = self.client.post(url, data)
        self.assertEqual(response.status_code, 200)
class DeletePeriodicTargetsTests(TestIndcatorCreateUpdateBase, TestCase):
    """
    Test deleting all PTs in the indicator form, and deleting single event PTs
    """

    def setUp(self):
        super(DeletePeriodicTargetsTests, self).setUp()
        # A result with no PT attached; used to exercise reassignment/cleanup.
        self.result = ResultFactory(
            periodic_target=None,
            indicator=self.indicator,
            program=self.program,
            achieved=1024,
            date_collected='2018-06-01'
        )

    def _create_lop_periodic_target_on_indicator(self):
        """Fixture helper: drive the update view to create the single LOP PT."""
        data = self._base_indicator_post_data(Indicator.LOP, [])
        url = reverse_lazy('indicator_update', args=[self.indicator.id])
        self.client.post(url, data)
        self.assertEqual(PeriodicTarget.objects.count(), 1)
        # override lop target with one that matches PTs
        self.assertEqual(self.indicator.calculated_lop_target, 3223)
        self.indicator.lop_target = self.indicator.calculated_lop_target
        self.indicator.save()

    def _create_annual_periodic_targets_on_indicator(self):
        """Use the view to create some test data as opposed to using factories or fixtures"""
        periodic_targets = [
            {"id": 0, "period": "Year 1", "target": "1", "start_date": "Jan 1, 2018", "end_date": "Dec 31, 2018"},
            {"id": 0, "period": "Year 2", "target": "2", "start_date": "Jan 1, 2019", "end_date": "Dec 31, 2019"},
            {"id": 0, "period": "Year 3", "target": "3", "start_date": "Jan 1, 2020", "end_date": "Dec 31, 2020"}]
        data = self._base_indicator_post_data(Indicator.ANNUAL, periodic_targets)
        url = reverse_lazy('indicator_update', args=[self.indicator.id])
        self.client.post(url, data)
        self.assertEqual(PeriodicTarget.objects.count(), 3)
        # override lop target with one that matches PTs (1 + 2 + 3)
        self.assertEqual(self.indicator.calculated_lop_target, 6)
        self.indicator.lop_target = self.indicator.calculated_lop_target
        self.indicator.save()

    def _create_event_targets_on_indicator(self):
        """Fixture helper: create two event PTs (targets 1 and 2)."""
        periodic_targets = [{"id": 0, "period": "a", "target": "1", "start_date": "", "end_date": ""},
                            {"id": 0, "period": "b", "target": "2"}]
        data = self._base_indicator_post_data(Indicator.EVENT, periodic_targets)
        url = reverse_lazy('indicator_update', args=[self.indicator.id])
        self.client.post(url, data)
        self.assertEqual(PeriodicTarget.objects.count(), 2)
        # override lop target with one that matches PTs (1 + 2)
        self.assertEqual(self.indicator.calculated_lop_target, 3)
        self.indicator.lop_target = self.indicator.calculated_lop_target
        self.indicator.save()

    def test_deleting_annual_targets(self):
        self._create_annual_periodic_targets_on_indicator()
        # delete them all
        url = reverse_lazy('pt_deleteall', args=[self.indicator.id])
        self.client.post(url, {'rationale': 'a reason'})
        # ensure PTs are gone and lop target has been updated
        self.indicator.refresh_from_db()
        self.assertEqual(self.indicator.periodictargets.count(), 0)
        self.assertEqual(self.indicator.lop_target, None)

    def test_deleting_lop_targets(self):
        self._create_lop_periodic_target_on_indicator()
        # delete them all
        url = reverse_lazy('pt_deleteall', args=[self.indicator.id])
        self.client.post(url, {'rationale': 'a reason'})
        # ensure PTs are gone and lop target has been updated
        self.indicator.refresh_from_db()
        self.assertEqual(self.indicator.periodictargets.count(), 0)
        self.assertEqual(self.indicator.lop_target, None)

    def test_deleting_all_event_targets(self):
        self._create_event_targets_on_indicator()
        # delete them all
        url = reverse_lazy('pt_deleteall', args=[self.indicator.id])
        self.client.post(url, {'rationale': 'a reason'})
        # ensure PTs are gone and lop target has been updated
        self.indicator.refresh_from_db()
        self.assertEqual(self.indicator.periodictargets.count(), 0)
        self.assertEqual(self.indicator.lop_target, None)

    def test_deleting_singl_event_target(self):
        # NOTE(review): "singl" typo in the method name; renaming would change
        # the test id, so it is left as-is.
        self._create_event_targets_on_indicator()
        # delete only the first of the two event targets
        url = reverse_lazy('pt_delete', args=[self.indicator.periodictargets.first().id])
        self.client.post(url, {'rationale': 'a reason'})
        # ensure one PT remains and lop target has been updated
        self.indicator.refresh_from_db()
        self.assertEqual(self.indicator.periodictargets.count(), 1)
        self.assertEqual(self.indicator.lop_target, 2)  # value of 2nd event target only
| 39.131111
| 133
| 0.642512
| 2,063
| 17,609
| 5.305381
| 0.095492
| 0.082229
| 0.020557
| 0.034171
| 0.805573
| 0.792325
| 0.778164
| 0.769027
| 0.754408
| 0.736409
| 0
| 0.030154
| 0.225964
| 17,609
| 449
| 134
| 39.218263
| 0.772854
| 0.069623
| 0
| 0.702797
| 0
| 0
| 0.136581
| 0.005706
| 0
| 0
| 0
| 0
| 0.237762
| 1
| 0.083916
| false
| 0
| 0.031469
| 0.003497
| 0.136364
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6b47d6e794053c0463436d9e2cfaf7d961488613
| 201
|
py
|
Python
|
stable_baselines/her/__init__.py
|
TreeKid/stable-baselines
|
129c1958160b95962b887c312cd2273aed35df60
|
[
"MIT"
] | 3,681
|
2018-07-02T16:07:58.000Z
|
2022-03-31T12:29:00.000Z
|
stable_baselines/her/__init__.py
|
TreeKid/stable-baselines
|
129c1958160b95962b887c312cd2273aed35df60
|
[
"MIT"
] | 1,088
|
2018-07-09T11:36:45.000Z
|
2022-03-31T23:50:35.000Z
|
stable_baselines/her/__init__.py
|
TreeKid/stable-baselines
|
129c1958160b95962b887c312cd2273aed35df60
|
[
"MIT"
] | 910
|
2018-07-23T12:16:47.000Z
|
2022-03-28T09:39:06.000Z
|
from stable_baselines.her.her import HER
from stable_baselines.her.replay_buffer import GoalSelectionStrategy, HindsightExperienceReplayWrapper
from stable_baselines.her.utils import HERGoalEnvWrapper
| 50.25
| 102
| 0.900498
| 23
| 201
| 7.695652
| 0.478261
| 0.169492
| 0.322034
| 0.372881
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.064677
| 201
| 3
| 103
| 67
| 0.941489
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
86375f6e038f6ed1afd136a336f08be507a38533
| 23,127
|
py
|
Python
|
CovNTF/beta_nmf_np.py
|
TeunKrikke/SourceSeparationNMF
|
cb5ffb9665c5117f7ba382254ada84ef8d5ae4e0
|
[
"MIT"
] | null | null | null |
CovNTF/beta_nmf_np.py
|
TeunKrikke/SourceSeparationNMF
|
cb5ffb9665c5117f7ba382254ada84ef8d5ae4e0
|
[
"MIT"
] | null | null | null |
CovNTF/beta_nmf_np.py
|
TeunKrikke/SourceSeparationNMF
|
cb5ffb9665c5117f7ba382254ada84ef8d5ae4e0
|
[
"MIT"
] | null | null | null |
"""
Contains an error which makes the soundlevels go down.
"""
import time
import numpy as np
from librosa import load, stft, istft, resample
from librosa.output import write_wav
from sklearn.cluster import MiniBatchKMeans, FeatureAgglomeration
from sklearn import datasets
# import matplotlib.pyplot as plt
import mir_eval
import corpus
from update_and_cost_func import update_H, update_W
from update_and_cost_func import update_H_CA, update_W_CA
from update_and_cost_func import cost as cost_fn
class beta_NMF(object):
    """Base class for beta-divergence NMF models.

    Stores the observed matrix ``X`` (as ``self._V``) with factor matrices
    ``W`` and ``H``, and provides Wiener-mask reconstruction helpers shared
    by all subclasses. ``train`` is a no-op here; subclasses implement the
    multiplicative-update loop.

    Changes from the original: removed the unused local ``index`` in
    ``__init__``, deleted dead commented-out Theano code, and factored the
    four copies of the mask expression into ``_wiener_mask``.
    """

    def __init__(self, W, H, X, epochs=1000,
                 debug=False, beta=0):
        """
        Args:
            W: dictionary matrix.
            H: activation matrix.
            X: observed matrix V to be approximated by W @ H.
            epochs: number of update iterations for subclasses' train().
            debug: when True, subclasses print the cost every epoch.
            beta: beta-divergence parameter forwarded to the update/cost
                functions (their semantics live in update_and_cost_func).
        """
        super(beta_NMF, self).__init__()
        self._epochs = epochs
        self._debug = debug
        self._V = X
        self._W = W
        self._H = H
        self._beta = beta

    def train(self):
        """Subclasses implement the actual update loop."""
        pass

    def _wiener_mask(self, W, H):
        # Wiener-style soft mask: (W H / W_full H_full) element-wise times V.
        V_hat = np.dot(self._W, self._H)
        return np.multiply(np.dot(W, H) / V_hat, self._V)

    def reconstruct(self, k):
        """Reconstruct the contribution of component k of the factorization."""
        W = self._W[:, k].reshape((-1, 1))
        H = self._H[k, :].reshape((1, -1))
        return self._wiener_mask(W, H)

    def reconstruct_with_Z(self, k, Z):
        """Reconstruct cluster k: Z holds one label per component (row of H);
        rows of H whose label differs from k are zeroed."""
        H = np.multiply(self._H, (Z == k).astype(int).reshape(-1, 1))
        return self._wiener_mask(self._W, H)

    def reconstruct_with_Z_as_H(self, K, Z):
        """Reconstruct using an explicit dictionary K and activations Z."""
        return self._wiener_mask(K, Z)

    def reconstruct_with_Z_t(self, k, Z):
        """Like reconstruct_with_Z, but Z is an element-wise label array of
        the same shape as H."""
        H = np.multiply(self._H, (Z == k).astype(int))
        return self._wiener_mask(self._W, H)
class Unsupervised_NMF(beta_NMF):
    """Unsupervised beta-NMF: both W and H are randomly initialized and learned."""

    def __init__(self, frequencies, time_steps, sources, X, epochs=1000,
                 debug=False, beta=0):
        # Random init offset by ones so entries start in [1, 2): keeps the
        # multiplicative updates strictly positive.
        self._frequencies = frequencies
        self._time_steps = time_steps
        W = np.asarray(np.random.rand(frequencies, sources) + np.ones((frequencies, sources)))
        H = np.asarray(np.random.rand(sources, time_steps) + np.ones((sources, time_steps)))
        super(Unsupervised_NMF, self).__init__(W, H, X, epochs,
                                               debug, beta)

    def train(self):
        """Alternate H and W multiplicative updates for `epochs` iterations.

        Returns:
            (W, H): the learned factor matrices.
        """
        for epoch in range(self._epochs):
            tick = time.time()
            V_hat = np.dot(self._W, self._H)
            self._H = update_H(self._W, self._H, self._V, V_hat, self._beta)
            V_hat = np.dot(self._W, self._H)
            self._W = update_W(self._W, self._H, self._V, V_hat, self._beta)
            V_hat = np.dot(self._W, self._H)
            cost = cost_fn(self._V, V_hat, self._beta)
            # Normalise columns of W to unit sum; the inverse scale goes into
            # H so the product W H is unchanged.
            scale = np.sum(self._W, axis=0)
            self._W = self._W * np.tile(np.power(scale, -1), (self._frequencies, 1))
            self._H = self._H * np.transpose(np.tile(np.power(scale, -1), (self._time_steps, 1)))
            if self._debug:
                print ('betaNMF -> iter {} time it took {}ms. This resulted in a loss of {}'.format(epoch, (time.time() - tick) * 1000, cost))
        return self._W, self._H
class Unsupervised_CA_NMF(beta_NMF):
    """Unsupervised NMF using the _CA variants of the update rules.

    NOTE(review): default beta=99 here (vs 0 elsewhere) — presumably a
    sentinel selecting a special cost in update_and_cost_func; verify there.
    """

    def __init__(self, frequencies, time_steps, sources, X, epochs=1000,
                 debug=False, beta=99):
        # Random init offset by ones so entries start in [1, 2).
        self._frequencies = frequencies
        self._time_steps = time_steps
        W = np.asarray(np.random.rand(frequencies, sources) + np.ones((frequencies, sources)))
        H = np.asarray(np.random.rand(sources, time_steps) + np.ones((sources, time_steps)))
        super(Unsupervised_CA_NMF, self).__init__(W, H, X, epochs,
                                                  debug, beta)

    def train(self):
        """Alternate update_H_CA / update_W_CA for `epochs` iterations.

        Returns:
            (W, H): the learned factor matrices.
        """
        for epoch in range(self._epochs):
            tick = time.time()
            V_hat = np.dot(self._W, self._H)
            self._H = update_H_CA(self._W, self._H, self._V, V_hat, self._beta)
            V_hat = np.dot(self._W, self._H)
            self._W = update_W_CA(self._W, self._H, self._V, V_hat, self._beta)
            V_hat = np.dot(self._W, self._H)
            cost = cost_fn(self._V, V_hat, self._beta)
            # Normalise columns of W to unit sum; compensate in H.
            scale = np.sum(self._W, axis=0)
            self._W = self._W * np.tile(np.power(scale, -1), (self._frequencies, 1))
            self._H = self._H * np.transpose(np.tile(np.power(scale, -1), (self._time_steps, 1)))
            if self._debug:
                print ('betaNMF -> iter {} time it took {}ms. This resulted in a loss of {}'.format(epoch, (time.time() - tick) * 1000, cost))
        return self._W, self._H
class Semisupervised_NMF(beta_NMF):
    """Semi-supervised beta-NMF: W and H are supplied by the caller and both
    are refined by multiplicative updates."""

    def __init__(self, W, H, X, epochs=1000,
                 debug=False, beta=0):
        super(Semisupervised_NMF, self).__init__(W, H, X, epochs,
                                                 debug, beta)

    def train(self):
        """Refine W and H for the configured number of epochs.

        Returns:
            (W, H): the refined factor matrices.
        """
        for iteration in range(self._epochs):
            started = time.time()
            approx = np.dot(self._W, self._H)
            self._H = update_H(self._W, self._H, self._V, approx, self._beta)
            approx = np.dot(self._W, self._H)
            self._W = update_W(self._W, self._H, self._V, approx, self._beta)
            approx = np.dot(self._W, self._H)
            current_cost = cost_fn(self._V, approx, self._beta)
            # Normalise the columns of W to unit sum, pushing the inverse
            # scaling into H so the product W H is preserved.
            inv_scale = np.power(np.sum(self._W, axis=0), -1)
            self._W = self._W * np.tile(inv_scale, (self._W.shape[0], 1))
            self._H = self._H * np.transpose(np.tile(inv_scale, (self._H.shape[1], 1)))
            if self._debug:
                print ('betaNMF -> iter {} time it took {}ms. This resulted in a loss of {}'.format(iteration, (time.time() - started) * 1000, current_cost))
        return self._W, self._H
class Supervised_NMF_v1(beta_NMF):
    """Supervised beta-NMF, variant 1: H is fully updated but only column 1
    of W is refined (the other dictionary columns stay fixed)."""

    def __init__(self, W, H, X, epochs=1000,
                 debug=False, beta=0):
        # BUG FIX: the original forwarded beta=0 unconditionally, silently
        # discarding the caller's beta. Every sibling class forwards beta;
        # the default is still 0, so default behavior is unchanged.
        super(Supervised_NMF_v1, self).__init__(W, H, X, epochs,
                                                debug, beta)

    def train(self):
        """Run the updates for `epochs` iterations.

        Returns:
            (W, H, loss): factors plus the per-epoch cost history.
        """
        loss = []
        for epoch in range(self._epochs):
            tick = time.time()
            V_hat = np.dot(self._W, self._H)
            self._H = update_H(self._W, self._H, self._V, V_hat, self._beta)
            V_hat = np.dot(self._W, self._H)
            # Only column 1 of W is refined; the rest is the fixed
            # (pre-trained) dictionary.
            self._W[:, 1] = update_W(self._W, self._H,
                                     self._V, V_hat, self._beta)[:, 1]
            V_hat = np.dot(self._W, self._H)
            cost = cost_fn(self._V, V_hat, self._beta)
            loss.append(cost)
            # Normalise columns of W to unit sum; compensate in H.
            scale = np.sum(self._W, axis=0)
            self._W = self._W * np.tile(np.power(scale, -1), (self._W.shape[0], 1))
            self._H = self._H * np.transpose(np.tile(np.power(scale, -1), (self._H.shape[1], 1)))
            if self._debug:
                print ('betaNMF -> iter {} time it took {}ms. This resulted in a loss of {}'.format(epoch, (time.time() - tick) * 1000, cost))
        return self._W, self._H, loss
class Supervised_NMF_v2(beta_NMF):
    """Supervised beta-NMF over stacked per-source factors.

    W and H are 3-D here: one (dictionary, activation) pair per source,
    stacked on axis 0 — assumed shapes (sources, frequencies, components)
    and (sources, components, time_steps); TODO confirm against callers.
    All H slices are updated; W slices are updated for every source except
    index 0, which stays fixed (the supervised/pre-trained dictionary).
    """

    def __init__(self, W, H, X, epochs=1000,
                 debug=False, beta=0):
        super(Supervised_NMF_v2, self).__init__(W, H, X, epochs,
                                                debug, beta)

    def train(self):
        """Run the updates; returns (W, H, loss) with per-epoch cost history."""
        loss = []
        for epoch in range(self._epochs):
            tick = time.time()
            # Approximation from the summed factor stacks.
            V_hat = np.dot(np.sum(self._W, axis=0), np.sum(self._H, axis=0))
            for i in range(self._H.shape[0]):
                self._H[i, :, :] = update_H(self._W[i, :, :], self._H[i, :, :],
                                            self._V, V_hat, self._beta)
            V_hat = np.dot(np.sum(self._W, axis=0), np.sum(self._H, axis=0))
            for i in range(1, self._W.shape[0]):  # source 0's dictionary is frozen
                self._W[i, :, :] = update_W(self._W[i, :, :], self._H[i, :, :],
                                            self._V, V_hat, self._beta)
            V_hat = np.dot(np.sum(self._W, axis=0), np.sum(self._H, axis=0))
            cost = cost_fn(self._V, V_hat, self._beta)
            loss.append(cost)
            # Normalise the summed dictionary's column sums; compensate in H.
            scale = np.sum(np.sum(self._W, axis=0), axis=0)
            self._W = self._W * np.tile(np.power(scale, -1), (self._W.shape[0], self._W.shape[1], 1))
            self._H = self._H * np.tile(np.power(scale, -1), (self._H.shape[0]*self._H.shape[2], 1)).reshape(self._H.shape)
            if self._debug:
                print ('betaNMF -> iter {} time it took {}ms. This resulted in a loss of {}'.format(epoch, (time.time() - tick) * 1000, cost))
        return self._W, self._H, loss

    def reconstruct(self, k, X):
        """Wiener-mask reconstruction of source k from spectrogram X.

        NOTE(review): overrides the base-class reconstruct with a different
        signature (extra X argument).
        """
        V_hat = np.dot(np.sum(self._W, axis=0), np.sum(self._H, axis=0))
        W = self._W[k, :, :].reshape(self._W.shape[1], self._W.shape[2])
        H = self._H[k, :, :].reshape(self._H.shape[1], self._H.shape[2])
        return np.multiply((np.dot(W, H)/V_hat), X)
class Supervised_NMF_v3(beta_NMF):
    """Supervised beta-NMF, variant 3: only the activations H are refined;
    all stacked dictionaries W stay fixed (no update_W call at all).

    W and H are 3-D stacks (one slice per source along axis 0), like v2.
    """

    def __init__(self, W, H, X, epochs=1000,
                 debug=False, beta=0):
        super(Supervised_NMF_v3, self).__init__(W, H, X, epochs,
                                                debug, beta)

    def train(self):
        """Run the H-only updates; returns (W, H, loss)."""
        loss = []
        for epoch in range(self._epochs):
            tick = time.time()
            # Approximation from the summed factor stacks.
            V_hat = np.dot(np.sum(self._W, axis=0), np.sum(self._H, axis=0))
            for i in range(self._H.shape[0]):
                self._H[i, :, :] = update_H(self._W[i, :, :], self._H[i, :, :],
                                            self._V, V_hat, self._beta)
            V_hat = np.dot(np.sum(self._W, axis=0), np.sum(self._H, axis=0))
            cost = cost_fn(self._V, V_hat, self._beta)
            loss.append(cost)
            # Normalise the summed dictionary's column sums; compensate in H.
            scale = np.sum(np.sum(self._W, axis=0), axis=0)
            self._W = self._W * np.tile(np.power(scale, -1), (self._W.shape[0], self._W.shape[1], 1))
            self._H = self._H * np.tile(np.power(scale, -1), (self._H.shape[0]*self._H.shape[2], 1)).reshape(self._H.shape)
            if self._debug:
                print ('betaNMF -> iter {} time it took {}ms. This resulted in a loss of {}'.format(epoch, (time.time() - tick) * 1000, cost))
        return self._W, self._H, loss

    def reconstruct(self, k, X):
        """Wiener-mask reconstruction of source k from spectrogram X.

        NOTE(review): different signature from the base-class reconstruct.
        """
        V_hat = np.dot(np.sum(self._W, axis=0), np.sum(self._H, axis=0))
        W = self._W[k, :, :].reshape(self._W.shape[1], self._W.shape[2])
        H = self._H[k, :, :].reshape(self._H.shape[1], self._H.shape[2])
        return np.multiply((np.dot(W, H)/V_hat), X)
class Supervised_NMF_v4(beta_NMF):
    """Supervised beta-NMF, variant 4: every stacked W and H slice is
    updated (unlike v2, source 0's dictionary is NOT frozen).

    W and H are 3-D stacks (one slice per source along axis 0), like v2/v3.
    """

    def __init__(self, W, H, X, epochs=1000,
                 debug=False, beta=0):
        super(Supervised_NMF_v4, self).__init__(W, H, X, epochs,
                                                debug, beta)

    def train(self):
        """Run the full updates; returns (W, H, loss)."""
        loss = []
        for epoch in range(self._epochs):
            tick = time.time()
            # Approximation from the summed factor stacks.
            V_hat = np.dot(np.sum(self._W, axis=0), np.sum(self._H, axis=0))
            for i in range(self._H.shape[0]):
                self._H[i, :, :] = update_H(self._W[i, :, :], self._H[i, :, :],
                                            self._V, V_hat, self._beta)
            V_hat = np.dot(np.sum(self._W, axis=0), np.sum(self._H, axis=0))
            for i in range(self._W.shape[0]):
                self._W[i, :, :] = update_W(self._W[i, :, :], self._H[i, :, :],
                                            self._V, V_hat, self._beta)
            V_hat = np.dot(np.sum(self._W, axis=0), np.sum(self._H, axis=0))
            cost = cost_fn(self._V, V_hat, self._beta)
            loss.append(cost)
            # Normalise the summed dictionary's column sums; compensate in H.
            scale = np.sum(np.sum(self._W, axis=0), axis=0)
            self._W = self._W * np.tile(np.power(scale, -1), (self._W.shape[0], self._W.shape[1], 1))
            self._H = self._H * np.tile(np.power(scale, -1), (self._H.shape[0]*self._H.shape[2], 1)).reshape(self._H.shape)
            if self._debug:
                print ('betaNMF -> iter {} time it took {}ms. This resulted in a loss of {}'.format(epoch, (time.time() - tick) * 1000, cost))
        return self._W, self._H, loss

    def reconstruct(self, k, X):
        """Wiener-mask reconstruction of source k from spectrogram X.

        NOTE(review): different signature from the base-class reconstruct.
        """
        V_hat = np.dot(np.sum(self._W, axis=0), np.sum(self._H, axis=0))
        W = self._W[k, :, :].reshape(self._W.shape[1], self._W.shape[2])
        H = self._H[k, :, :].reshape(self._H.shape[1], self._H.shape[2])
        return np.multiply((np.dot(W, H)/V_hat), X)
class Semisupervised_CA_NMF(beta_NMF):
    """Semi-supervised NMF using the _CA variants of the update rules.

    NOTE(review): default beta=99 here (vs 0 elsewhere) — presumably a
    sentinel selecting a special cost in update_and_cost_func; verify there.
    """

    def __init__(self, W, H, X, epochs=1000,
                 debug=False, beta=99):
        super(Semisupervised_CA_NMF, self).__init__(W, H, X, epochs,
                                                    debug, beta)

    def train(self):
        """Alternate update_H_CA / update_W_CA for `epochs` iterations.

        Returns:
            (W, H): the refined factor matrices.
        """
        for epoch in range(self._epochs):
            tick = time.time()
            V_hat = np.dot(self._W, self._H)
            self._H = update_H_CA(self._W, self._H, self._V, V_hat, self._beta)
            V_hat = np.dot(self._W, self._H)
            self._W = update_W_CA(self._W, self._H, self._V, V_hat, self._beta)
            V_hat = np.dot(self._W, self._H)
            cost = cost_fn(self._V, V_hat, self._beta)
            # Normalise columns of W to unit sum; compensate in H.
            scale = np.sum(self._W, axis=0)
            self._W = self._W * np.tile(np.power(scale, -1), (self._W.shape[0], 1))
            self._H = self._H * np.transpose(np.tile(np.power(scale, -1), (self._H.shape[1], 1)))
            if self._debug:
                print ('betaNMF -> iter {} time it took {}ms. This resulted in a loss of {}'.format(epoch, (time.time() - tick) * 1000, cost))
        return self._W, self._H
class Supervised_CA_NMF_v1(beta_NMF):
    """Supervised CA-NMF on 2-D factors where only column 1 of W is free.

    H is fully updated each epoch; of W, only column 1 receives the CA
    update (the other columns stay fixed -- presumably pre-trained bases;
    confirm against the caller).  train() returns (W, H, loss).
    """

    def __init__(self, W, H, X, epochs=1000,
                 debug=False, beta=99):
        super(Supervised_CA_NMF_v1, self).__init__(W, H, X, epochs,
                                                   debug, beta)

    def train(self):
        """Run the partially-frozen CA updates; returns (W, H, loss)."""
        loss = []
        for step in range(self._epochs):
            started = time.time()
            approx = np.dot(self._W, self._H)
            self._H = update_H_CA(self._W, self._H, self._V, approx, self._beta)
            approx = np.dot(self._W, self._H)
            # Only column 1 of W is written back; the rest stay frozen.
            self._W[:, 1] = update_W_CA(self._W, self._H,
                                        self._V, approx, self._beta)[:, 1]
            approx = np.dot(self._W, self._H)
            cost = cost_fn(self._V, approx, self._beta)
            loss.append(cost)
            # Divide each column of W and each row of H by W's column sums.
            # NOTE(review): both factors scaled by the inverse -- verify.
            col_sums = np.sum(self._W, axis=0)
            self._W = self._W * np.tile(np.power(col_sums, -1), (self._W.shape[0], 1))
            self._H = self._H * np.transpose(np.tile(np.power(col_sums, -1), (self._H.shape[1], 1)))
            if self._debug:
                print ('betaNMF -> iter {} time it took {}ms. This resulted in a loss of {}'.format(step, (time.time() - started) * 1000, cost))
        return self._W, self._H, loss
class Supervised_CA_NMF_v2(beta_NMF):
    """Supervised CA-NMF over stacked per-source factor pairs.

    3-D self._W / self._H hold one (W_k, H_k) pair per source; the model
    estimate is np.dot(sum_k W_k, sum_k H_k).  All H_k are updated, but
    W_0 is kept fixed (the W loop starts at index 1) -- presumably the
    pre-trained bases of source 0; confirm.
    """
    def __init__(self, W, H, X, epochs=1000,
                 debug=False, beta=99):
        super(Supervised_CA_NMF_v2, self).__init__(W, H, X, epochs,
                                                   debug, beta)
    def train(self):
        """Run the CA multiplicative updates; returns (W, H, loss)."""
        loss = []
        for epoch in range(self._epochs):
            tick = time.time()
            V_hat = np.dot(np.sum(self._W, axis=0),np.sum(self._H, axis=0))
            for i in range(self._H.shape[0]):
                self._H[i,:,:] = update_H_CA(self._W[i,:,:], self._H[i,:,:],
                                             self._V, V_hat, self._beta)
            V_hat = np.dot(np.sum(self._W, axis=0),np.sum(self._H, axis=0))
            # Skip source 0: its bases stay fixed during training.
            for i in range(1, self._W.shape[0]):
                self._W[i,:,:] = update_W_CA(self._W[i,:,:], self._H[i,:,:],
                                             self._V, V_hat, self._beta)
            V_hat = np.dot(np.sum(self._W, axis=0),np.sum(self._H, axis=0))
            cost = cost_fn(self._V, V_hat, self._beta)
            loss.append(cost)
            # Rescale both factors by the inverse column sums of sum_k W_k.
            # NOTE(review): the double inverse scaling shrinks the product
            # and the H reshape-based broadcast looks fragile -- confirm.
            scale = np.sum(np.sum(self._W,axis=0), axis=0)
            self._W = self._W * np.tile(np.power(scale,-1),(self._W.shape[0], self._W.shape[1], 1))
            self._H = self._H * np.tile(np.power(scale,-1),(self._H.shape[0]*self._H.shape[2],1)).reshape(self._H.shape)
            if self._debug:
                print ('betaNMF -> iter {} time it took {}ms. This resulted in a loss of {}'.format(epoch, (time.time() - tick) * 1000, cost))
        return self._W, self._H, loss
    def reconstruct(self,k, X):
        """Mask X element-wise by source k's share (W_k H_k / V_hat)."""
        V_hat = np.dot(np.sum(self._W, axis=0),np.sum(self._H, axis=0))
        W = self._W[k,:,:].reshape(self._W.shape[1], self._W.shape[2])
        H = self._H[k,:,:].reshape(self._H.shape[1], self._H.shape[2])
        return np.multiply((np.dot(W,H)/V_hat), X)
class Supervised_CA_NMF_v3(beta_NMF):
    """Supervised CA-NMF that adapts only the activations.

    3-D self._W / self._H hold one (W_k, H_k) pair per source.  Every
    epoch updates all H_k; W is never updated (fully fixed bases), only
    rescaled by the normalisation step.
    """
    def __init__(self, W, H, X, epochs=1000,
                 debug=False, beta=99):
        super(Supervised_CA_NMF_v3, self).__init__(W, H, X, epochs,
                                                   debug, beta)
    def train(self):
        """Run the H-only CA updates; returns (W, H, loss)."""
        loss = []
        for epoch in range(self._epochs):
            tick = time.time()
            V_hat = np.dot(np.sum(self._W, axis=0),np.sum(self._H, axis=0))
            for i in range(self._H.shape[0]):
                self._H[i,:,:] = update_H_CA(self._W[i,:,:], self._H[i,:,:],
                                             self._V, V_hat, self._beta)
            V_hat = np.dot(np.sum(self._W, axis=0),np.sum(self._H, axis=0))
            cost = cost_fn(self._V, V_hat, self._beta)
            loss.append(cost)
            # Rescale both factors by the inverse column sums of sum_k W_k.
            # NOTE(review): double inverse scaling shrinks the product and
            # the H reshape-based broadcast looks fragile -- confirm.
            scale = np.sum(np.sum(self._W,axis=0), axis=0)
            self._W = self._W * np.tile(np.power(scale,-1),(self._W.shape[0], self._W.shape[1], 1))
            self._H = self._H * np.tile(np.power(scale,-1),(self._H.shape[0]*self._H.shape[2],1)).reshape(self._H.shape)
            if self._debug:
                print ('betaNMF -> iter {} time it took {}ms. This resulted in a loss of {}'.format(epoch, (time.time() - tick) * 1000, cost))
        return self._W, self._H, loss
    def reconstruct(self,k, X):
        """Mask X element-wise by source k's share (W_k H_k / V_hat)."""
        V_hat = np.dot(np.sum(self._W, axis=0),np.sum(self._H, axis=0))
        W = self._W[k,:,:].reshape(self._W.shape[1], self._W.shape[2])
        H = self._H[k,:,:].reshape(self._H.shape[1], self._H.shape[2])
        return np.multiply((np.dot(W,H)/V_hat), X)
class Supervised_CA_NMF_v4(beta_NMF):
    """Supervised CA-NMF over stacked per-source factor pairs.

    Like Supervised_CA_NMF_v2 but updates *all* W_k (no frozen source);
    the model estimate is np.dot(sum_k W_k, sum_k H_k).
    """
    def __init__(self, W, H, X, epochs=1000,
                 debug=False, beta=99):
        super(Supervised_CA_NMF_v4, self).__init__(W, H, X, epochs,
                                                   debug, beta)
    def train(self):
        """Run the CA multiplicative updates; returns (W, H, loss)."""
        loss = []
        for epoch in range(self._epochs):
            tick = time.time()
            V_hat = np.dot(np.sum(self._W, axis=0),np.sum(self._H, axis=0))
            for i in range(self._H.shape[0]):
                self._H[i,:,:] = update_H_CA(self._W[i,:,:], self._H[i,:,:],
                                             self._V, V_hat, self._beta)
            V_hat = np.dot(np.sum(self._W, axis=0),np.sum(self._H, axis=0))
            for i in range(self._W.shape[0]):
                self._W[i,:,:] = update_W_CA(self._W[i,:,:], self._H[i,:,:],
                                             self._V, V_hat, self._beta)
            V_hat = np.dot(np.sum(self._W, axis=0),np.sum(self._H, axis=0))
            cost = cost_fn(self._V, V_hat, self._beta)
            loss.append(cost)
            # Rescale both factors by the inverse column sums of sum_k W_k.
            # NOTE(review): double inverse scaling shrinks the product and
            # the H reshape-based broadcast looks fragile -- confirm.
            scale = np.sum(np.sum(self._W,axis=0), axis=0)
            self._W = self._W * np.tile(np.power(scale,-1),(self._W.shape[0], self._W.shape[1], 1))
            self._H = self._H * np.tile(np.power(scale,-1),(self._H.shape[0]*self._H.shape[2],1)).reshape(self._H.shape)
            if self._debug:
                print ('betaNMF -> iter {} time it took {}ms. This resulted in a loss of {}'.format(epoch, (time.time() - tick) * 1000, cost))
        return self._W, self._H, loss
    def reconstruct(self,k, X):
        """Mask X element-wise by source k's share (W_k H_k / V_hat)."""
        V_hat = np.dot(np.sum(self._W, axis=0),np.sum(self._H, axis=0))
        W = self._W[k,:,:].reshape(self._W.shape[1], self._W.shape[2])
        H = self._H[k,:,:].reshape(self._H.shape[1], self._H.shape[2])
        return np.multiply((np.dot(W,H)/V_hat), X)
| 42.049091
| 142
| 0.5347
| 3,639
| 23,127
| 3.175048
| 0.032701
| 0.076597
| 0.045179
| 0.039813
| 0.95283
| 0.952484
| 0.948243
| 0.947983
| 0.942098
| 0.942098
| 0
| 0.025044
| 0.286937
| 23,127
| 549
| 143
| 42.125683
| 0.675581
| 0.131102
| 0
| 0.837079
| 0
| 0
| 0.040258
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.101124
| false
| 0.002809
| 0.030899
| 0
| 0.230337
| 0.033708
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
86504e3771debc2ebe45d8bceaec1cad0f41cd73
| 93
|
py
|
Python
|
meiduo_mall/celery_tasks/config.py
|
bapewing/MeiduoMall
|
2b1634de2b7bc9eec80716770894824d668ec314
|
[
"MIT"
] | null | null | null |
meiduo_mall/celery_tasks/config.py
|
bapewing/MeiduoMall
|
2b1634de2b7bc9eec80716770894824d668ec314
|
[
"MIT"
] | null | null | null |
meiduo_mall/celery_tasks/config.py
|
bapewing/MeiduoMall
|
2b1634de2b7bc9eec80716770894824d668ec314
|
[
"MIT"
] | null | null | null |
# Celery settings (variable names follow Celery's pre-4.0 UPPERCASE config).
# Message broker: local Redis, database 14.
BROKER_URL = 'redis://127.0.0.1:6379/14'
# Task result backend: local Redis, database 15.
CELERY_RESULT_BACKEND = 'redis://127.0.0.1:6379/15'
| 31
| 51
| 0.698925
| 19
| 93
| 3.263158
| 0.631579
| 0.258065
| 0.290323
| 0.322581
| 0.483871
| 0.483871
| 0
| 0
| 0
| 0
| 0
| 0.275862
| 0.064516
| 93
| 2
| 52
| 46.5
| 0.436782
| 0
| 0
| 0
| 0
| 0
| 0.537634
| 0.537634
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
865acf55c2ecd5ba2e65f948503d9064facce9ff
| 10,523
|
py
|
Python
|
block.py
|
LiewMK/Pygame-SpriteGame-ProjectileMotion
|
784675223d865764d7e1dc574efc3b5b53b33405
|
[
"CC0-1.0"
] | null | null | null |
block.py
|
LiewMK/Pygame-SpriteGame-ProjectileMotion
|
784675223d865764d7e1dc574efc3b5b53b33405
|
[
"CC0-1.0"
] | null | null | null |
block.py
|
LiewMK/Pygame-SpriteGame-ProjectileMotion
|
784675223d865764d7e1dc574efc3b5b53b33405
|
[
"CC0-1.0"
] | null | null | null |
import pygame
class block():
    """A destructible, left-walking sprite.

    Drawn at (x, y); once health reaches 0, draw() plays a 10-frame break
    animation (8 draw calls per frame, 80 ticks total) and then disables
    the sprite (ell=False, empty hitbox).
    """

    # Break animation: frames Block1..Block10, each held for 8 draw calls.
    # Replaces 80 hand-repeated load() literals; same 80 loads, same order.
    blockmove = [pygame.image.load('Block{}.png'.format(i))
                 for i in range(1, 11)
                 for _ in range(8)]
    # Static sprite shown while the block is still alive.
    block = pygame.image.load('Block1.png')

    def __init__(self, x, y, width, height, health):
        self.x = x
        self.y = y
        self.height = height
        self.width = width
        # NOTE(review): hitbox uses a fixed 48x78 box, not width/height -- confirm.
        self.hitbox = (self.x - 5, self.y - 5, 48, 78)
        self.health = health
        self.die = 0        # current break-animation frame index
        self.ell = True     # False once the break animation has finished

    def draw(self, win):
        """Blit the live sprite, or advance the break animation when dead."""
        if self.health > 0:
            win.blit(self.block, (self.x, self.y))
        else:
            if self.die < 80:
                win.blit(self.blockmove[self.die], (self.x, self.y))
                self.die += 1
            else:
                self.ell = False
                self.hitbox = (0, 0, 0, 0)

    def spirit(self):
        """Return False once the block has fully broken (safe to remove)."""
        return self.ell

    def walk(self, win):
        """Move the block left by a fixed velocity (win is unused but kept
        for call-site compatibility)."""
        vel = 0.4
        self.x -= vel

    def Health(self):
        """Decrement health, never below 0."""
        if self.health > 0:
            self.health -= 1
class gate():
    """An animated gate sprite.

    Shows a closed gate while health > 0; once health reaches 0, draw()
    plays a 16-frame opening animation (8 draw calls per frame, 128 ticks
    total), then keeps showing the fully-open frame with collision off.
    """

    # Opening animation: frames Gate1..Gate16, each held for 8 draw calls.
    # Replaces 128 hand-repeated load() literals; same 128 loads, same order.
    gatemove = [pygame.image.load('Gate{}.png'.format(i))
                for i in range(1, 17)
                for _ in range(8)]
    gate = pygame.image.load('Gate1.png')    # closed gate (alive)
    gate2 = pygame.image.load('Gate16.png')  # fully open gate (dead)

    def __init__(self, x, y, width, height, health):
        self.x = x
        self.y = y
        self.height = height
        self.width = width
        # NOTE(review): hitbox uses a fixed 48x78 box, not width/height -- confirm.
        self.hitbox = (self.x - 5, self.y - 5, 48, 78)
        self.health = health
        self.die = 0        # current opening-animation frame index
        self.ell = True     # False once the opening animation has finished

    def draw(self, win):
        """Blit the gate in its current state and advance the animation."""
        if self.health > 0:
            win.blit(self.gate, (self.x, self.y))
            # NOTE(review): walkCount/angle are written here but never read
            # in this class -- possibly leftovers from another sprite.
            self.walkCount = 0
            self.angle = -90
        else:
            if self.die < 128:
                win.blit(self.gatemove[self.die], (self.x, self.y))
                self.die += 1
            else:
                win.blit(self.gate2, (self.x, self.y))
                self.ell = False
                self.hitbox = (0, 0, 0, 0)

    def spirit(self):
        """Return False once the gate is fully open (collision disabled)."""
        return self.ell

    def Health(self):
        """Decrement health, never below 0."""
        if self.health > 0:
            self.health -= 1
| 60.131429
| 85
| 0.553454
| 1,288
| 10,523
| 4.515528
| 0.053571
| 0.399072
| 0.544188
| 0.637552
| 0.963549
| 0.948246
| 0.948246
| 0.948246
| 0.948246
| 0.948246
| 0
| 0.041128
| 0.265229
| 10,523
| 174
| 86
| 60.477011
| 0.711071
| 0
| 0
| 0.865854
| 0
| 0
| 0.197604
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.054878
| false
| 0
| 0.006098
| 0.012195
| 0.115854
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
8663dff19294427227830b69efc86478a4a0b3a6
| 37,393
|
py
|
Python
|
UltronRoBo/modules/logo.py
|
UltronRoBo/UltronRoBoAssistant
|
874dcf725d453ffabd85543533d2a07676af4d65
|
[
"MIT"
] | null | null | null |
UltronRoBo/modules/logo.py
|
UltronRoBo/UltronRoBoAssistant
|
874dcf725d453ffabd85543533d2a07676af4d65
|
[
"MIT"
] | null | null | null |
UltronRoBo/modules/logo.py
|
UltronRoBo/UltronRoBoAssistant
|
874dcf725d453ffabd85543533d2a07676af4d65
|
[
"MIT"
] | null | null | null |
"""
MIT License
Copyright (c) 2021 UltronRoBo
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from UltronRoBo.events import UltronLogo
from UltronRoBo import OWNER_ID
from UltronRoBo import telethn as tbot
import os
from PIL import Image, ImageDraw, ImageFont
import random
@UltronLogo(pattern="^/logo ?(.*)")
async def lego(event):
    """Handle /logo <text>: render <text> over the logo background in the
    Chopsic font (black fill, yellow stroke) and send the image back."""
    quew = event.pattern_match.group(1)
    # Non-owners must supply text; the owner may call it bare.
    if event.sender_id != OWNER_ID and not quew:
        await event.reply('Provide Some Text To Draw!')
        return
    await event.reply('Creating your logo... wait!')
    try:
        text = event.pattern_match.group(1)
        img = Image.open('./UltronRoBo/resources/LogoBG.png')
        draw = ImageDraw.Draw(img)
        image_widthz, image_heightz = img.size
        font = ImageFont.truetype("./UltronRoBo/fonts/Chopsic.otf", 330)
        # textsize() is deprecated in newer Pillow -- fine on the pinned version.
        w, h = draw.textsize(text, font=font)
        h += int(h * 0.21)  # pad height so the text sits visually centred
        # White base pass, centred on the background.
        draw.text(((image_widthz - w) / 2, (image_heightz - h) / 2), text,
                  font=font, fill=(255, 255, 255))
        x = (image_widthz - w) / 2
        y = (image_heightz - h) / 2 + 6
        # Stroked pass, nudged 6px down over the base.
        draw.text((x, y), text, font=font, fill="black",
                  stroke_width=25, stroke_fill="yellow")
        fname2 = "LogoByUltronRoBo.png"
        img.save(fname2, "png")
        await tbot.send_file(event.chat_id, fname2, caption="**Logo created, and uploaded as per the request by 𝑼𝒍𝒕𝒓𝒐𝒏𝑹𝒐𝑩𝒐.**")
        if os.path.exists(fname2):
            os.remove(fname2)
    except Exception as e:
        await event.reply(f'Error, Report at @UltronSupportChat, {e}')
@UltronLogo(pattern="^/wlogo ?(.*)")
async def lego(event):
    """Handle /wlogo <text>: render <text> in the Maghrib font (plain
    white, no stroke) over the logo background and send it back."""
    quew = event.pattern_match.group(1)
    # Non-owners must supply text; the owner may call it bare.
    if event.sender_id != OWNER_ID and not quew:
        await event.reply('Provide Some Text To Draw!')
        return
    await event.reply('Creating your logo...wait!')
    try:
        text = event.pattern_match.group(1)
        img = Image.open('./UltronRoBo/resources/LogoBG.png')
        draw = ImageDraw.Draw(img)
        image_widthz, image_heightz = img.size
        font = ImageFont.truetype("./UltronRoBo/fonts/Maghrib.ttf", 1000)
        # textsize() is deprecated in newer Pillow -- fine on the pinned version.
        w, h = draw.textsize(text, font=font)
        h += int(h * 0.21)  # pad height so the text sits visually centred
        # White base pass, centred on the background.
        draw.text(((image_widthz - w) / 2, (image_heightz - h) / 2), text,
                  font=font, fill=(255, 255, 255))
        x = (image_widthz - w) / 2
        y = (image_heightz - h) / 2 + 6
        # Second pass, nudged 6px down over the base.
        draw.text((x, y), text, font=font, fill="white",
                  stroke_width=0, stroke_fill="white")
        fname2 = "LogoByUltronRoBo.png"
        img.save(fname2, "png")
        await tbot.send_file(event.chat_id, fname2, caption="**Logo created, and uploaded as per the request by 𝑼𝒍𝒕𝒓𝒐𝒏𝑹𝒐𝑩𝒐.**")
        if os.path.exists(fname2):
            os.remove(fname2)
    except Exception as e:
        await event.reply(f'Error, Report at @UltronSupportChat, {e}')
##
@UltronLogo(pattern="^/blogo ?(.*)")
async def lego(event):
    """Handle /blogo <text>: BeASt font, black fill with red stroke."""
    quew = event.pattern_match.group(1)
    # Non-owners must supply text; the owner may call it bare.
    if event.sender_id != OWNER_ID and not quew:
        await event.reply('Provide Some Text To Draw!')
        return
    await event.reply('Creating your logo... wait!')
    try:
        text = event.pattern_match.group(1)
        img = Image.open('./UltronRoBo/resources/LogoBG.png')
        draw = ImageDraw.Draw(img)
        image_widthz, image_heightz = img.size
        font = ImageFont.truetype("./UltronRoBo/fonts/BeASt.ttf", 330)
        # textsize() is deprecated in newer Pillow -- fine on the pinned version.
        w, h = draw.textsize(text, font=font)
        h += int(h * 0.21)  # pad height so the text sits visually centred
        draw.text(((image_widthz - w) / 2, (image_heightz - h) / 2), text,
                  font=font, fill=(255, 255, 255))
        x = (image_widthz - w) / 2
        y = (image_heightz - h) / 2 + 6
        draw.text((x, y), text, font=font, fill="black",
                  stroke_width=25, stroke_fill="red")
        fname2 = "LogoByUltronRoBo.png"
        img.save(fname2, "png")
        await tbot.send_file(event.chat_id, fname2, caption="**Logo created, and uploaded as per the request by 𝑼𝒍𝒕𝒓𝒐𝒏𝑹𝒐𝑩𝒐.**")
        if os.path.exists(fname2):
            os.remove(fname2)
    except Exception as e:
        await event.reply(f'Error, Report at @UltronSupportChat, {e}')
@UltronLogo(pattern="^/clogo ?(.*)")
async def lego(event):
    """Handle /clogo <text>: Cindrella font, black fill with blue stroke."""
    quew = event.pattern_match.group(1)
    # Non-owners must supply text; the owner may call it bare.
    if event.sender_id != OWNER_ID and not quew:
        await event.reply('Provide Some Text To Draw!')
        return
    await event.reply('Creating your logo... wait!')
    try:
        text = event.pattern_match.group(1)
        img = Image.open('./UltronRoBo/resources/LogoBG.png')
        draw = ImageDraw.Draw(img)
        image_widthz, image_heightz = img.size
        font = ImageFont.truetype("./UltronRoBo/fonts/CindrellaPersonalUse.ttf", 330)
        # textsize() is deprecated in newer Pillow -- fine on the pinned version.
        w, h = draw.textsize(text, font=font)
        h += int(h * 0.21)  # pad height so the text sits visually centred
        draw.text(((image_widthz - w) / 2, (image_heightz - h) / 2), text,
                  font=font, fill=(255, 255, 255))
        x = (image_widthz - w) / 2
        y = (image_heightz - h) / 2 + 6
        draw.text((x, y), text, font=font, fill="black",
                  stroke_width=25, stroke_fill="blue")
        fname2 = "LogoByUltronRoBo.png"
        img.save(fname2, "png")
        await tbot.send_file(event.chat_id, fname2, caption="**Logo created, and uploaded as per the request by 𝑼𝒍𝒕𝒓𝒐𝒏𝑹𝒐𝑩𝒐.**")
        if os.path.exists(fname2):
            os.remove(fname2)
    except Exception as e:
        await event.reply(f'Error, Report at @UltronSupportChat, {e}')
@UltronLogo(pattern="^/crlogo ?(.*)")
async def lego(event):
    """Handle /crlogo <text>: Circus font, black fill with yellow stroke."""
    quew = event.pattern_match.group(1)
    # Non-owners must supply text; the owner may call it bare.
    if event.sender_id != OWNER_ID and not quew:
        await event.reply('Provide Some Text To Draw!')
        return
    await event.reply('Creating your logo... wait!')
    try:
        text = event.pattern_match.group(1)
        img = Image.open('./UltronRoBo/resources/LogoBG.png')
        draw = ImageDraw.Draw(img)
        image_widthz, image_heightz = img.size
        font = ImageFont.truetype("./UltronRoBo/fonts/Circus.ttf", 330)
        # textsize() is deprecated in newer Pillow -- fine on the pinned version.
        w, h = draw.textsize(text, font=font)
        h += int(h * 0.21)  # pad height so the text sits visually centred
        draw.text(((image_widthz - w) / 2, (image_heightz - h) / 2), text,
                  font=font, fill=(255, 255, 255))
        x = (image_widthz - w) / 2
        y = (image_heightz - h) / 2 + 6
        draw.text((x, y), text, font=font, fill="black",
                  stroke_width=25, stroke_fill="yellow")
        fname2 = "LogoByUltronRoBo.png"
        img.save(fname2, "png")
        await tbot.send_file(event.chat_id, fname2, caption="**Logo created, and uploaded as per the request by 𝑼𝒍𝒕𝒓𝒐𝒏𝑹𝒐𝑩𝒐.**")
        if os.path.exists(fname2):
            os.remove(fname2)
    except Exception as e:
        await event.reply(f'Error, Report at @UltronSupportChat, {e}')
@UltronLogo(pattern="^/dclogo ?(.*)")
async def lego(event):
    """Handle /dclogo <text>: DeathCrow font, black fill, yellow stroke."""
    quew = event.pattern_match.group(1)
    # Non-owners must supply text; the owner may call it bare.
    if event.sender_id != OWNER_ID and not quew:
        await event.reply('Provide Some Text To Draw!')
        return
    await event.reply('Creating your logo... wait!')
    try:
        text = event.pattern_match.group(1)
        img = Image.open('./UltronRoBo/resources/LogoBG.png')
        draw = ImageDraw.Draw(img)
        image_widthz, image_heightz = img.size
        font = ImageFont.truetype("./UltronRoBo/fonts/DeathCrow.ttf", 330)
        # textsize() is deprecated in newer Pillow -- fine on the pinned version.
        w, h = draw.textsize(text, font=font)
        h += int(h * 0.21)  # pad height so the text sits visually centred
        draw.text(((image_widthz - w) / 2, (image_heightz - h) / 2), text,
                  font=font, fill=(255, 255, 255))
        x = (image_widthz - w) / 2
        y = (image_heightz - h) / 2 + 6
        draw.text((x, y), text, font=font, fill="black",
                  stroke_width=25, stroke_fill="yellow")
        fname2 = "LogoByUltronRoBo.png"
        img.save(fname2, "png")
        await tbot.send_file(event.chat_id, fname2, caption="**Logo created, and uploaded as per the request by 𝑼𝒍𝒕𝒓𝒐𝒏𝑹𝒐𝑩𝒐.**")
        if os.path.exists(fname2):
            os.remove(fname2)
    except Exception as e:
        await event.reply(f'Error, Report at @UltronSupportChat, {e}')
@UltronLogo(pattern="^/dslogo ?(.*)")
async def lego(event):
    """Handle /dslogo <text>: DroidSansMono font, black fill, yellow stroke."""
    quew = event.pattern_match.group(1)
    # Non-owners must supply text; the owner may call it bare.
    if event.sender_id != OWNER_ID and not quew:
        await event.reply('Provide Some Text To Draw!')
        return
    await event.reply('Creating your logo... wait!')
    try:
        text = event.pattern_match.group(1)
        img = Image.open('./UltronRoBo/resources/LogoBG.png')
        draw = ImageDraw.Draw(img)
        image_widthz, image_heightz = img.size
        font = ImageFont.truetype("./UltronRoBo/fonts/DroidSansMono.ttf", 330)
        # textsize() is deprecated in newer Pillow -- fine on the pinned version.
        w, h = draw.textsize(text, font=font)
        h += int(h * 0.21)  # pad height so the text sits visually centred
        draw.text(((image_widthz - w) / 2, (image_heightz - h) / 2), text,
                  font=font, fill=(255, 255, 255))
        x = (image_widthz - w) / 2
        y = (image_heightz - h) / 2 + 6
        draw.text((x, y), text, font=font, fill="black",
                  stroke_width=25, stroke_fill="yellow")
        fname2 = "LogoByUltronRoBo.png"
        img.save(fname2, "png")
        await tbot.send_file(event.chat_id, fname2, caption="**Logo created, and uploaded as per the request by 𝑼𝒍𝒕𝒓𝒐𝒏𝑹𝒐𝑩𝒐.**")
        if os.path.exists(fname2):
            os.remove(fname2)
    except Exception as e:
        await event.reply(f'Error, Report at @UltronSupportChat, {e}')
@UltronLogo(pattern="^/flogo ?(.*)")
async def lego(event):
    """Handle /flogo <text>: FlamanteStencilBold font, black fill, yellow stroke."""
    quew = event.pattern_match.group(1)
    # Non-owners must supply text; the owner may call it bare.
    if event.sender_id != OWNER_ID and not quew:
        await event.reply('Provide Some Text To Draw!')
        return
    await event.reply('Creating your logo... wait!')
    try:
        text = event.pattern_match.group(1)
        img = Image.open('./UltronRoBo/resources/LogoBG.png')
        draw = ImageDraw.Draw(img)
        image_widthz, image_heightz = img.size
        font = ImageFont.truetype("./UltronRoBo/fonts/FlamanteStencilBold.ttf", 330)
        # textsize() is deprecated in newer Pillow -- fine on the pinned version.
        w, h = draw.textsize(text, font=font)
        h += int(h * 0.21)  # pad height so the text sits visually centred
        draw.text(((image_widthz - w) / 2, (image_heightz - h) / 2), text,
                  font=font, fill=(255, 255, 255))
        x = (image_widthz - w) / 2
        y = (image_heightz - h) / 2 + 6
        draw.text((x, y), text, font=font, fill="black",
                  stroke_width=25, stroke_fill="yellow")
        fname2 = "LogoByUltronRoBo.png"
        img.save(fname2, "png")
        await tbot.send_file(event.chat_id, fname2, caption="**Logo created, and uploaded as per the request by 𝑼𝒍𝒕𝒓𝒐𝒏𝑹𝒐𝑩𝒐.**")
        if os.path.exists(fname2):
            os.remove(fname2)
    except Exception as e:
        await event.reply(f'Error, Report at @UltronSupportChat, {e}')
@UltronLogo(pattern="^/glogo ?(.*)")
async def lego(event):
    """Handle /glogo <text>: GangOfThree font, black fill, yellow stroke."""
    quew = event.pattern_match.group(1)
    # Non-owners must supply text; the owner may call it bare.
    if event.sender_id != OWNER_ID and not quew:
        await event.reply('Provide Some Text To Draw!')
        return
    await event.reply('Creating your logo... wait!')
    try:
        text = event.pattern_match.group(1)
        img = Image.open('./UltronRoBo/resources/LogoBG.png')
        draw = ImageDraw.Draw(img)
        image_widthz, image_heightz = img.size
        font = ImageFont.truetype("./UltronRoBo/fonts/GangOfThree.ttf", 330)
        # textsize() is deprecated in newer Pillow -- fine on the pinned version.
        w, h = draw.textsize(text, font=font)
        h += int(h * 0.21)  # pad height so the text sits visually centred
        draw.text(((image_widthz - w) / 2, (image_heightz - h) / 2), text,
                  font=font, fill=(255, 255, 255))
        x = (image_widthz - w) / 2
        y = (image_heightz - h) / 2 + 6
        draw.text((x, y), text, font=font, fill="black",
                  stroke_width=25, stroke_fill="yellow")
        fname2 = "LogoByUltronRoBo.png"
        img.save(fname2, "png")
        await tbot.send_file(event.chat_id, fname2, caption="**Logo created, and uploaded as per the request by 𝑼𝒍𝒕𝒓𝒐𝒏𝑹𝒐𝑩𝒐.**")
        if os.path.exists(fname2):
            os.remove(fname2)
    except Exception as e:
        await event.reply(f'Error, Report at @UltronSupportChat, {e}')
@UltronLogo(pattern="^/hlogo ?(.*)")
async def lego(event):
    """Handle /hlogo <text>: HanSolo font, black fill, yellow stroke."""
    quew = event.pattern_match.group(1)
    # Non-owners must supply text; the owner may call it bare.
    if event.sender_id != OWNER_ID and not quew:
        await event.reply('Provide Some Text To Draw!')
        return
    await event.reply('Creating your logo... wait!')
    try:
        text = event.pattern_match.group(1)
        img = Image.open('./UltronRoBo/resources/LogoBG.png')
        draw = ImageDraw.Draw(img)
        image_widthz, image_heightz = img.size
        font = ImageFont.truetype("./UltronRoBo/fonts/HanSolo.otf", 330)
        # textsize() is deprecated in newer Pillow -- fine on the pinned version.
        w, h = draw.textsize(text, font=font)
        h += int(h * 0.21)  # pad height so the text sits visually centred
        draw.text(((image_widthz - w) / 2, (image_heightz - h) / 2), text,
                  font=font, fill=(255, 255, 255))
        x = (image_widthz - w) / 2
        y = (image_heightz - h) / 2 + 6
        draw.text((x, y), text, font=font, fill="black",
                  stroke_width=25, stroke_fill="yellow")
        fname2 = "LogoByUltronRoBo.png"
        img.save(fname2, "png")
        await tbot.send_file(event.chat_id, fname2, caption="**Logo created, and uploaded as per the request by 𝑼𝒍𝒕𝒓𝒐𝒏𝑹𝒐𝑩𝒐.**")
        if os.path.exists(fname2):
            os.remove(fname2)
    except Exception as e:
        await event.reply(f'Error, Report at @UltronSupportChat, {e}')
@UltronLogo(pattern="^/ilogo ?(.*)")
async def lego(event):
    """Handle /ilogo <text>: Impact font, black fill, yellow stroke."""
    quew = event.pattern_match.group(1)
    # Non-owners must supply text; the owner may call it bare.
    if event.sender_id != OWNER_ID and not quew:
        await event.reply('Provide Some Text To Draw!')
        return
    await event.reply('Creating your logo... wait!')
    try:
        text = event.pattern_match.group(1)
        img = Image.open('./UltronRoBo/resources/LogoBG.png')
        draw = ImageDraw.Draw(img)
        image_widthz, image_heightz = img.size
        font = ImageFont.truetype("./UltronRoBo/fonts/Impact.ttf", 330)
        # textsize() is deprecated in newer Pillow -- fine on the pinned version.
        w, h = draw.textsize(text, font=font)
        h += int(h * 0.21)  # pad height so the text sits visually centred
        draw.text(((image_widthz - w) / 2, (image_heightz - h) / 2), text,
                  font=font, fill=(255, 255, 255))
        x = (image_widthz - w) / 2
        y = (image_heightz - h) / 2 + 6
        draw.text((x, y), text, font=font, fill="black",
                  stroke_width=25, stroke_fill="yellow")
        fname2 = "LogoByUltronRoBo.png"
        img.save(fname2, "png")
        await tbot.send_file(event.chat_id, fname2, caption="**Logo created, and uploaded as per the request by 𝑼𝒍𝒕𝒓𝒐𝒏𝑹𝒐𝑩𝒐.**")
        if os.path.exists(fname2):
            os.remove(fname2)
    except Exception as e:
        await event.reply(f'Error, Report at @UltronSupportChat, {e}')
@UltronLogo(pattern="^/lclogo ?(.*)")
async def lego(event):
quew = event.pattern_match.group(1)
if event.sender_id == OWNER_ID:
pass
else:
if not quew:
await event.reply('Provide Some Text To Draw!')
return
else:
pass
await event.reply('Creating your logo... waimt!')
try:
text = event.pattern_match.group(1)
img = Image.open('./UltronRoBo/resources/LogoBG.png')
draw = ImageDraw.Draw(img)
image_widthz, image_heightz = img.size
pointsize = 500
fillcolor = "gold"
shadowcolor = "blue"
font = ImageFont.truetype("./UltronRoBo/fonts/LordcorpsStencil.ttf", 330)
w, h = draw.textsize(text, font=font)
h += int(h*0.21)
image_width, image_height = img.size
draw.text(((image_widthz-w)/2, (image_heightz-h)/2), text, font=font, fill=(255, 255, 255))
x = (image_widthz-w)/2
y= ((image_heightz-h)/2+6)
draw.text((x, y), text, font=font, fill="black", stroke_width=25, stroke_fill="yellow")
fname2 = "LogoByUltronRoBo.png"
img.save(fname2, "png")
await tbot.send_file(event.chat_id, fname2, caption="**Logo created, and uploaded as per the request by 𝑼𝒍𝒕𝒓𝒐𝒏𝑹𝒐𝑩𝒐.**")
if os.path.exists(fname2):
os.remove(fname2)
except Exception as e:
await event.reply(f'Error, Report at @UltronSupportChat, {e}')
@UltronLogo(pattern="^/lslogo ?(.*)")
async def lego(event):
quew = event.pattern_match.group(1)
if event.sender_id == OWNER_ID:
pass
else:
if not quew:
await event.reply('Provide Some Text To Draw!')
return
else:
pass
await event.reply('Creating your logo... waimt!')
try:
text = event.pattern_match.group(1)
img = Image.open('./UltronRoBo/resources/LogoBG.png')
draw = ImageDraw.Draw(img)
image_widthz, image_heightz = img.size
pointsize = 500
fillcolor = "gold"
shadowcolor = "blue"
font = ImageFont.truetype("./UltronRoBo/fonts/LucidStreams.ttf", 330)
w, h = draw.textsize(text, font=font)
h += int(h*0.21)
image_width, image_height = img.size
draw.text(((image_widthz-w)/2, (image_heightz-h)/2), text, font=font, fill=(255, 255, 255))
x = (image_widthz-w)/2
y= ((image_heightz-h)/2+6)
draw.text((x, y), text, font=font, fill="black", stroke_width=25, stroke_fill="yellow")
fname2 = "LogoByUltronRoBo.png"
img.save(fname2, "png")
await tbot.send_file(event.chat_id, fname2, caption="**Logo created, and uploaded as per the request by 𝑼𝒍𝒕𝒓𝒐𝒏𝑹𝒐𝑩𝒐.**")
if os.path.exists(fname2):
os.remove(fname2)
except Exception as e:
await event.reply(f'Error, Report at @UltronSupportChat, {e}')
@UltronLogo(pattern="^/alogo ?(.*)")
async def lego(event):
quew = event.pattern_match.group(1)
if event.sender_id == OWNER_ID:
pass
else:
if not quew:
await event.reply('Provide Some Text To Draw!')
return
else:
pass
await event.reply('Creating your logo... waimt!')
try:
text = event.pattern_match.group(1)
img = Image.open('./UltronRoBo/resources/LogoBG.png')
draw = ImageDraw.Draw(img)
image_widthz, image_heightz = img.size
pointsize = 500
fillcolor = "gold"
shadowcolor = "blue"
font = ImageFont.truetype("./UltronRoBo/fonts/LucidStreamsLaminar.ttf", 330)
w, h = draw.textsize(text, font=font)
h += int(h*0.21)
image_width, image_height = img.size
draw.text(((image_widthz-w)/2, (image_heightz-h)/2), text, font=font, fill=(255, 255, 255))
x = (image_widthz-w)/2
y= ((image_heightz-h)/2+6)
draw.text((x, y), text, font=font, fill="black", stroke_width=25, stroke_fill="yellow")
fname2 = "LogoByUltronRoBo.png"
img.save(fname2, "png")
await tbot.send_file(event.chat_id, fname2, caption="**Logo created, and uploaded as per the request by 𝑼𝒍𝒕𝒓𝒐𝒏𝑹𝒐𝑩𝒐.**")
if os.path.exists(fname2):
os.remove(fname2)
except Exception as e:
await event.reply(f'Error, Report at @UltronSupportChat, {e}')
@UltronLogo(pattern="^/nlogo ?(.*)")
async def lego(event):
quew = event.pattern_match.group(1)
if event.sender_id == OWNER_ID:
pass
else:
if not quew:
await event.reply('Provide Some Text To Draw!')
return
else:
pass
await event.reply('Creating your logo... waimt!')
try:
text = event.pattern_match.group(1)
img = Image.open('./UltronRoBo/resources/LogoBG.png')
draw = ImageDraw.Draw(img)
image_widthz, image_heightz = img.size
pointsize = 500
fillcolor = "gold"
shadowcolor = "blue"
font = ImageFont.truetype("./UltronRoBo/fonts/NightMachine.ttf", 330)
w, h = draw.textsize(text, font=font)
h += int(h*0.21)
image_width, image_height = img.size
draw.text(((image_widthz-w)/2, (image_heightz-h)/2), text, font=font, fill=(255, 255, 255))
x = (image_widthz-w)/2
y= ((image_heightz-h)/2+6)
draw.text((x, y), text, font=font, fill="black", stroke_width=25, stroke_fill="yellow")
fname2 = "LogoByUltronRoBo.png"
img.save(fname2, "png")
await tbot.send_file(event.chat_id, fname2, caption="**Logo created, and uploaded as per the request by 𝑼𝒍𝒕𝒓𝒐𝒏𝑹𝒐𝑩𝒐.**")
if os.path.exists(fname2):
os.remove(fname2)
except Exception as e:
await event.reply(f'Error, Report at @UltronSupportChat, {e}')
@UltronLogo(pattern="^/qlogo ?(.*)")
async def lego(event):
quew = event.pattern_match.group(1)
if event.sender_id == OWNER_ID:
pass
else:
if not quew:
await event.reply('Provide Some Text To Draw!')
return
else:
pass
await event.reply('Creating your logo... waimt!')
try:
text = event.pattern_match.group(1)
img = Image.open('./UltronRoBo/resources/LogoBG.png')
draw = ImageDraw.Draw(img)
image_widthz, image_heightz = img.size
pointsize = 500
fillcolor = "gold"
shadowcolor = "blue"
font = ImageFont.truetype("./UltronRoBo/fonts/Quivira.otf", 330)
w, h = draw.textsize(text, font=font)
h += int(h*0.21)
image_width, image_height = img.size
draw.text(((image_widthz-w)/2, (image_heightz-h)/2), text, font=font, fill=(255, 255, 255))
x = (image_widthz-w)/2
y= ((image_heightz-h)/2+6)
draw.text((x, y), text, font=font, fill="black", stroke_width=25, stroke_fill="yellow")
fname2 = "LogoByUltronRoBo.png"
img.save(fname2, "png")
await tbot.send_file(event.chat_id, fname2, caption="**Logo created, and uploaded as per the request by 𝑼𝒍𝒕𝒓𝒐𝒏𝑹𝒐𝑩𝒐.**")
if os.path.exists(fname2):
os.remove(fname2)
except Exception as e:
await event.reply(f'Error, Report at @UltronSupportChat, {e}')
@UltronLogo(pattern="^/rrlogo ?(.*)")
async def lego(event):
quew = event.pattern_match.group(1)
if event.sender_id == OWNER_ID:
pass
else:
if not quew:
await event.reply('Provide Some Text To Draw!')
return
else:
pass
await event.reply('Creating your logo... waimt!')
try:
text = event.pattern_match.group(1)
img = Image.open('./UltronRoBo/resources/LogoBG.png')
draw = ImageDraw.Draw(img)
image_widthz, image_heightz = img.size
pointsize = 500
fillcolor = "gold"
shadowcolor = "blue"
font = ImageFont.truetype("./UltronRoBo/fonts/RoadRage.otf", 330)
w, h = draw.textsize(text, font=font)
h += int(h*0.21)
image_width, image_height = img.size
draw.text(((image_widthz-w)/2, (image_heightz-h)/2), text, font=font, fill=(255, 255, 255))
x = (image_widthz-w)/2
y= ((image_heightz-h)/2+6)
draw.text((x, y), text, font=font, fill="black", stroke_width=25, stroke_fill="yellow")
fname2 = "LogoByUltronRoBo.png"
img.save(fname2, "png")
await tbot.send_file(event.chat_id, fname2, caption="**Logo created, and uploaded as per the request by 𝑼𝒍𝒕𝒓𝒐𝒏𝑹𝒐𝑩𝒐.**")
if os.path.exists(fname2):
os.remove(fname2)
except Exception as e:
await event.reply(f'Error, Report at @UltronSupportChat, {e}')
@UltronLogo(pattern="^/rilogo ?(.*)")
async def lego(event):
quew = event.pattern_match.group(1)
if event.sender_id == OWNER_ID:
pass
else:
if not quew:
await event.reply('Provide Some Text To Draw!')
return
else:
pass
await event.reply('Creating your logo... waimt!')
try:
text = event.pattern_match.group(1)
img = Image.open('./UltronRoBo/resources/LogoBG.png')
draw = ImageDraw.Draw(img)
image_widthz, image_heightz = img.size
pointsize = 500
fillcolor = "gold"
shadowcolor = "blue"
font = ImageFont.truetype("./UltronRoBo/fonts/Roboto-Italic.ttf", 330)
w, h = draw.textsize(text, font=font)
h += int(h*0.21)
image_width, image_height = img.size
draw.text(((image_widthz-w)/2, (image_heightz-h)/2), text, font=font, fill=(255, 255, 255))
x = (image_widthz-w)/2
y= ((image_heightz-h)/2+6)
draw.text((x, y), text, font=font, fill="black", stroke_width=25, stroke_fill="yellow")
fname2 = "LogoByUltronRoBo.png"
img.save(fname2, "png")
await tbot.send_file(event.chat_id, fname2, caption="**Logo created, and uploaded as per the request by 𝑼𝒍𝒕𝒓𝒐𝒏𝑹𝒐𝑩𝒐.**")
if os.path.exists(fname2):
os.remove(fname2)
except Exception as e:
await event.reply(f'Error, Report at @UltronSupportChat, {e}')
@UltronLogo(pattern="^/rmlogo ?(.*)")
async def lego(event):
quew = event.pattern_match.group(1)
if event.sender_id == OWNER_ID:
pass
else:
if not quew:
await event.reply('Provide Some Text To Draw!')
return
else:
pass
await event.reply('Creating your logo... waimt!')
try:
text = event.pattern_match.group(1)
img = Image.open('./UltronRoBo/resources/LogoBG.png')
draw = ImageDraw.Draw(img)
image_widthz, image_heightz = img.size
pointsize = 500
fillcolor = "gold"
shadowcolor = "blue"
font = ImageFont.truetype("./UltronRoBo/fonts/Roboto-Medium.ttf", 330)
w, h = draw.textsize(text, font=font)
h += int(h*0.21)
image_width, image_height = img.size
draw.text(((image_widthz-w)/2, (image_heightz-h)/2), text, font=font, fill=(255, 255, 255))
x = (image_widthz-w)/2
y= ((image_heightz-h)/2+6)
draw.text((x, y), text, font=font, fill="black", stroke_width=25, stroke_fill="yellow")
fname2 = "LogoByUltronRoBo.png"
img.save(fname2, "png")
await tbot.send_file(event.chat_id, fname2, caption="**Logo created, and uploaded as per the request by 𝑼𝒍𝒕𝒓𝒐𝒏𝑹𝒐𝑩𝒐.**")
if os.path.exists(fname2):
os.remove(fname2)
except Exception as e:
await event.reply(f'Error, Report at @UltronSupportChat, {e}')
@UltronLogo(pattern="^/rlogo ?(.*)")
async def lego(event):
quew = event.pattern_match.group(1)
if event.sender_id == OWNER_ID:
pass
else:
if not quew:
await event.reply('Provide Some Text To Draw!')
return
else:
pass
await event.reply('Creating your logo... waimt!')
try:
text = event.pattern_match.group(1)
img = Image.open('./UltronRoBo/resources/LogoBG.png')
draw = ImageDraw.Draw(img)
image_widthz, image_heightz = img.size
pointsize = 500
fillcolor = "gold"
shadowcolor = "blue"
font = ImageFont.truetype("./UltronRoBo/fonts/Robot-Regular.ttf", 330)
w, h = draw.textsize(text, font=font)
h += int(h*0.21)
image_width, image_height = img.size
draw.text(((image_widthz-w)/2, (image_heightz-h)/2), text, font=font, fill=(255, 255, 255))
x = (image_widthz-w)/2
y= ((image_heightz-h)/2+6)
draw.text((x, y), text, font=font, fill="black", stroke_width=25, stroke_fill="yellow")
fname2 = "LogoByUltronRoBo.png"
img.save(fname2, "png")
await tbot.send_file(event.chat_id, fname2, caption="**Logo created, and uploaded as per the request by 𝑼𝒍𝒕𝒓𝒐𝒏𝑹𝒐𝑩𝒐.**")
if os.path.exists(fname2):
os.remove(fname2)
except Exception as e:
await event.reply(f'Error, Report at @UltronSupportChat, {e}')
@UltronLogo(pattern="^/vlogo ?(.*)")
async def lego(event):
quew = event.pattern_match.group(1)
if event.sender_id == OWNER_ID:
pass
else:
if not quew:
await event.reply('Provide Some Text To Draw!')
return
else:
pass
await event.reply('Creating your logo... waimt!')
try:
text = event.pattern_match.group(1)
img = Image.open('./UltronRoBo/resources/LogoBG.png')
draw = ImageDraw.Draw(img)
image_widthz, image_heightz = img.size
pointsize = 500
fillcolor = "gold"
shadowcolor = "blue"
font = ImageFont.truetype("./UltronRoBo/fonts/VampireWars.ttf", 330)
w, h = draw.textsize(text, font=font)
h += int(h*0.21)
image_width, image_height = img.size
draw.text(((image_widthz-w)/2, (image_heightz-h)/2), text, font=font, fill=(255, 255, 255))
x = (image_widthz-w)/2
y= ((image_heightz-h)/2+6)
draw.text((x, y), text, font=font, fill="black", stroke_width=25, stroke_fill="yellow")
fname2 = "LogoByUltronRoBo.png"
img.save(fname2, "png")
await tbot.send_file(event.chat_id, fname2, caption="**Logo created, and uploaded as per the request by 𝑼𝒍𝒕𝒓𝒐𝒏𝑹𝒐𝑩𝒐.**")
if os.path.exists(fname2):
os.remove(fname2)
except Exception as e:
await event.reply(f'Error, Report at @UltronSupportChat, {e}')
@UltronLogo(pattern="^/vilogo ?(.*)")
async def lego(event):
quew = event.pattern_match.group(1)
if event.sender_id == OWNER_ID:
pass
else:
if not quew:
await event.reply('Provide Some Text To Draw!')
return
else:
pass
await event.reply('Creating your logo... waimt!')
try:
text = event.pattern_match.group(1)
img = Image.open('./UltronRoBo/resources/LogoBG.png')
draw = ImageDraw.Draw(img)
image_widthz, image_heightz = img.size
pointsize = 500
fillcolor = "gold"
shadowcolor = "blue"
font = ImageFont.truetype("./UltronRoBo/fonts/VampireWarsItalic.ttf", 330)
w, h = draw.textsize(text, font=font)
h += int(h*0.21)
image_width, image_height = img.size
draw.text(((image_widthz-w)/2, (image_heightz-h)/2), text, font=font, fill=(255, 255, 255))
x = (image_widthz-w)/2
y= ((image_heightz-h)/2+6)
draw.text((x, y), text, font=font, fill="black", stroke_width=25, stroke_fill="yellow")
fname2 = "LogoByUltronRoBo.png"
img.save(fname2, "png")
await tbot.send_file(event.chat_id, fname2, caption="**Logo created, and uploaded as per the request by 𝑼𝒍𝒕𝒓𝒐𝒏𝑹𝒐𝑩𝒐.**")
if os.path.exists(fname2):
os.remove(fname2)
except Exception as e:
await event.reply(f'Error, Report at @UltronSupportChat, {e}')
file_help = os.path.basename(__file__)
file_help = file_help.replace(".py", "")
file_helpo = file_help.replace("_", " ")
__mod_name__ = "Logo"
__help__ = """
❍ /logo <text> *:* Logo with Chopsic Font.
❍ /wlogo <text> *:* Logo with Maghrib Font.
❍ /blogo <text> *:* Logo with BeASt Font.
❍ /clogo <text> *:* Logo with CindrellaPersonalUse Font.
❍ /crlogo <text> *:* Logo with Circus Font.
❍ /dclogo <text> *:* Logo with DeathCrow Font.
❍ /dslogo <text> *:* Logo with DroidSansMono Font.
❍ /flogo <text> *:* Logo with FlamanteStencilBold Font.
❍ /glogo <text> *:* Logo with GangOfThree Font.
❍ /hlogo <text> *:* Logo with HanSolo Font.
❍ /ilogo <text> *:* Logo with Impact Font.
❍ /lclogo <text> *:* Logo with LordcorpsStencil Font.
❍ /lslogo <text> *:* Logo with LucidStreams Font.
❍ /alogo <text> *:* Logo with LucidStreamsLaminar Font.
❍ /nlogo <text> *:* Logo with NightMachine Font.
❍ /qlogo <text> *:* Logo with Quivira Font.
❍ /rrlogo <text> *:* Logo with RoadRage Font.
❍ /rilogo <text> *:* Logo with Roboto-Italic Font.
❍ /rmlogo <text> *:* Logo with Roboto-Medium Font.
❍ /rlogo <text> *:* Logo with Roboto-Regular Font.
❍ /vlogo <text> *:* Logo with VampireWars Font.
❍ /vilogo <text> *:* Logo with VapireWarsItalic Font.
"""
| 42.686073
| 131
| 0.556655
| 4,512
| 37,393
| 4.535683
| 0.056294
| 0.03225
| 0.048375
| 0.0473
| 0.887173
| 0.883655
| 0.883655
| 0.881163
| 0.881163
| 0.881163
| 0
| 0.027766
| 0.318081
| 37,393
| 875
| 132
| 42.734857
| 0.773952
| 0
| 0
| 0.891791
| 0
| 0
| 0.200793
| 0.040836
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.054726
| 0.007463
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
86ad1266df74d7cde96e71e4f3dca21567a6cdd5
| 10,253
|
py
|
Python
|
maskrcnn_benchmark/nas/modeling/micro_heads.py
|
DominickZhang/NAS-FCOS
|
1f7281478430eaed028e2cc2dfa8be226c63939b
|
[
"BSD-2-Clause"
] | 187
|
2019-07-04T08:48:52.000Z
|
2022-03-16T09:17:59.000Z
|
maskrcnn_benchmark/nas/modeling/micro_heads.py
|
DominickZhang/NAS-FCOS
|
1f7281478430eaed028e2cc2dfa8be226c63939b
|
[
"BSD-2-Clause"
] | 10
|
2019-07-09T03:08:55.000Z
|
2022-01-15T13:45:39.000Z
|
maskrcnn_benchmark/nas/modeling/micro_heads.py
|
DominickZhang/NAS-FCOS
|
1f7281478430eaed028e2cc2dfa8be226c63939b
|
[
"BSD-2-Clause"
] | 35
|
2019-07-20T00:05:24.000Z
|
2022-03-30T03:56:32.000Z
|
import math
import copy
import torch
import torch.nn as nn
import torch.nn.functional as F
from .layer_factory import HEAD_OPS, AGG_OPS
from ..rl.genotypes import HEAD_OP_NAMES, HEAD_AGG_NAMES
from maskrcnn_benchmark.layers import Scale
class MicroHead_v2(nn.Module):
    """
    Simplified head arch which is used to search and construct single-stage
    detector head part.

    FCOS-style head: for each FPN level it produces class logits, box
    regression and centerness maps.  The first ``share_weights_layer``
    layers keep per-level (non-shared) weights; the remaining layers share
    weights across all levels.
    """

    def __init__(self, share_weights_layer, head_config, repeats, cfg):
        """
        Arguments:
            share_weights_layer: number of leading head layers that keep
                per-level weights (0 .. num_head_layers)
            head_config: head arch sampled by controller; iterable of op
                indices into HEAD_OP_NAMES, one per head layer
            repeats: repeat count forwarded to each HEAD_OPS factory
            cfg: global setting info
        """
        super(MicroHead_v2, self).__init__()
        self.num_classes = cfg.MODEL.RETINANET.NUM_CLASSES - 1  # drop background class
        self.in_channels = cfg.MODEL.BACKBONE.OUT_CHANNELS
        self.num_head_layers = cfg.SEARCH.HEAD.NUM_HEAD_LAYERS
        self.num_head = 5  # number of FPN levels the head runs on
        self.output_concat = cfg.SEARCH.HEAD.OUTPUT_CONCAT
        self.fpn_strides = cfg.MODEL.RETINANET.ANCHOR_STRIDES
        self.dense_points = 1
        self.norm_reg_targets = False
        self.centerness_on_reg = False
        self.share_weights_layer = share_weights_layer
        assert self.share_weights_layer >= 0
        assert self.share_weights_layer <= self.num_head_layers
        # judge whether to have split_weights
        if self.share_weights_layer == 0:
            self.has_split_weights = False
        else:
            self.has_split_weights = True
        # judge whether to have shared_weights
        if self.share_weights_layer == self.num_head_layers:
            self.has_shared_weights = False
        else:
            self.has_shared_weights = True
        if self.has_split_weights:
            # One nested ModuleList of layers per FPN level (weights NOT shared).
            self._cls_head_split_ops = nn.ModuleList()
            self._reg_head_split_ops = nn.ModuleList()
            for ind in range(self.num_head):
                cls_empty_head_layer = nn.ModuleList()
                reg_empty_head_layer = nn.ModuleList()
                self._cls_head_split_ops.append(cls_empty_head_layer)
                self._reg_head_split_ops.append(reg_empty_head_layer)
        if self.has_shared_weights:
            # Single tower of layers shared by every FPN level.
            self._cls_head_global_ops = nn.ModuleList()
            self._reg_head_global_ops = nn.ModuleList()
        agg_size = self.in_channels
        for ind, cell in enumerate(head_config):
            op_index = cell
            op_name = HEAD_OP_NAMES[op_index]
            _cls_ops = HEAD_OPS[op_name](agg_size, 1, True, repeats=repeats)
            _reg_ops = HEAD_OPS[op_name](agg_size, 1, True, repeats=repeats)
            if ind < self.share_weights_layer:
                # do not share weights: each level gets its own deep copy
                for ind2 in range(self.num_head):
                    self._cls_head_split_ops[ind2].append(copy.deepcopy(_cls_ops))
                    self._reg_head_split_ops[ind2].append(copy.deepcopy(_reg_ops))
            else:
                # share weights
                self._cls_head_global_ops.append(_cls_ops)
                self._reg_head_global_ops.append(_reg_ops)
        final_channel = self.in_channels
        # Final 3x3 predictor convs.
        self.cls_logits = nn.Conv2d(
            final_channel, self.num_classes * self.dense_points, kernel_size=3, stride=1,
            padding=1
        )
        self.bbox_pred = nn.Conv2d(
            final_channel, 4 * self.dense_points, kernel_size=3, stride=1,
            padding=1
        )
        self.centerness = nn.Conv2d(
            final_channel, 1 * self.dense_points, kernel_size=3, stride=1,
            padding=1
        )
        # Per-level learnable scaling of the regression output (one per FPN level).
        self.scales = nn.ModuleList([Scale(init_value=1.0) for _ in range(5)])
        # initialization
        for modules in [self.cls_logits, self.bbox_pred, self.centerness]:
            for l in modules.modules():
                if isinstance(l, nn.Conv2d):
                    torch.nn.init.normal_(l.weight, std=0.01)
                    torch.nn.init.constant_(l.bias, 0)
        # retinanet_bias_init: bias the classifier so initial foreground
        # probability equals PRIOR_PROB (focal-loss initialization)
        prior_prob = cfg.MODEL.RETINANET.PRIOR_PROB
        bias_value = -math.log((1 - prior_prob) / prior_prob)
        torch.nn.init.constant_(self.cls_logits.bias, bias_value)

    def forward(self, x):
        """Run the head over ``x`` (list of per-level FPN feature maps).

        Returns (logits, bbox_reg, centerness), each a list with one
        tensor per input level.
        """
        logits = []
        bbox_reg = []
        centerness = []
        for l, feature in enumerate(x):
            cls_out = feature
            reg_out = feature
            # obtain initial pools
            if self.has_split_weights:
                # compute cls_split_feats
                for ops in self._cls_head_split_ops[l]:
                    cls_out = ops(cls_out)
                # compute reg_split_feats
                for ops in self._reg_head_split_ops[l]:
                    reg_out = ops(reg_out)
            if self.has_shared_weights:
                # compute cls_global_feats
                for ops in self._cls_head_global_ops:
                    cls_out = ops(cls_out)
                # compute reg_global_feats
                for ops in self._reg_head_global_ops:
                    reg_out = ops(reg_out)
            logits.append(self.cls_logits(cls_out))
            if self.centerness_on_reg:
                centerness.append(self.centerness(reg_out))
            else:
                centerness.append(self.centerness(cls_out))
            bbox_pred = self.scales[l](self.bbox_pred(reg_out))
            if self.norm_reg_targets:
                if self.training:
                    bbox_pred = F.relu(bbox_pred)
                    bbox_reg.append(bbox_pred)
                else:
                    # at inference, rescale normalized targets back by the level stride
                    bbox_reg.append(bbox_pred * self.fpn_strides[l])
            else:
                # unnormalized targets are predicted in log-space
                bbox_reg.append(torch.exp(bbox_pred))
        return logits, bbox_reg, centerness
class MicroHead_v2_retinanet(nn.Module):
    """
    Simplified head arch which is used to search and construct single-stage
    detector head part.

    RetinaNet variant of MicroHead_v2: anchor-based, so it predicts
    ``num_anchors`` boxes/classes per location and has no centerness or
    per-level scale branch.
    """

    def __init__(self, share_weights_layer, head_config, repeats, cfg):
        """
        Arguments:
            share_weights_layer: number of leading head layers that keep
                per-level weights (0 .. num_head_layers)
            head_config: head arch sampled by controller; iterable of op
                indices into HEAD_OP_NAMES, one per head layer
            repeats: repeat count forwarded to each HEAD_OPS factory
            cfg: global setting info
        """
        super(MicroHead_v2_retinanet, self).__init__()
        self.num_head = 5  # number of FPN levels the head runs on
        self.num_classes = cfg.MODEL.RETINANET.NUM_CLASSES - 1  # drop background class
        self.num_head_layers = cfg.SEARCH.HEAD.NUM_HEAD_LAYERS
        self.in_channels = cfg.MODEL.BACKBONE.OUT_CHANNELS
        self.share_weights_layer = share_weights_layer
        # anchors per location = aspect ratios x scales per octave
        self.num_anchors = len(cfg.MODEL.RETINANET.ASPECT_RATIOS) \
            * cfg.MODEL.RETINANET.SCALES_PER_OCTAVE
        assert self.share_weights_layer >= 0
        assert self.share_weights_layer <= self.num_head_layers
        # judge whether to have split_weights
        if self.share_weights_layer == 0:
            self.has_split_weights = False
        else:
            self.has_split_weights = True
        # judge whether to have shared_weights
        if self.share_weights_layer == self.num_head_layers:
            self.has_shared_weights = False
        else:
            self.has_shared_weights = True
        if self.has_split_weights:
            # One nested ModuleList of layers per FPN level (weights NOT shared).
            self._cls_head_split_ops = nn.ModuleList()
            self._reg_head_split_ops = nn.ModuleList()
            for ind in range(self.num_head):
                cls_empty_head_layer = nn.ModuleList()
                reg_empty_head_layer = nn.ModuleList()
                self._cls_head_split_ops.append(cls_empty_head_layer)
                self._reg_head_split_ops.append(reg_empty_head_layer)
        if self.has_shared_weights:
            # Single tower of layers shared by every FPN level.
            self._cls_head_global_ops = nn.ModuleList()
            self._reg_head_global_ops = nn.ModuleList()
        agg_size = self.in_channels
        for ind, cell in enumerate(head_config):
            op_index = cell
            op_name = HEAD_OP_NAMES[op_index]
            _cls_ops = HEAD_OPS[op_name](agg_size, 1, True, repeats=repeats)
            _reg_ops = HEAD_OPS[op_name](agg_size, 1, True, repeats=repeats)
            if ind < self.share_weights_layer:
                # do not share weights: each level gets its own deep copy
                for ind2 in range(self.num_head):
                    self._cls_head_split_ops[ind2].append(copy.deepcopy(_cls_ops))
                    self._reg_head_split_ops[ind2].append(copy.deepcopy(_reg_ops))
            else:
                # share weights
                self._cls_head_global_ops.append(_cls_ops)
                self._reg_head_global_ops.append(_reg_ops)
        final_channel = self.in_channels
        # Final 3x3 predictor convs (per-anchor outputs).
        self.cls_logits = nn.Conv2d(
            final_channel, self.num_classes * self.num_anchors, kernel_size=3, stride=1,
            padding=1
        )
        self.bbox_pred = nn.Conv2d(
            final_channel, 4 * self.num_anchors, kernel_size=3, stride=1,
            padding=1
        )
        # initialization
        for modules in [self.cls_logits, self.bbox_pred]:
            for l in modules.modules():
                if isinstance(l, nn.Conv2d):
                    torch.nn.init.normal_(l.weight, std=0.01)
                    torch.nn.init.constant_(l.bias, 0)
        # retinanet_bias_init: bias the classifier so initial foreground
        # probability equals PRIOR_PROB (focal-loss initialization)
        prior_prob = cfg.MODEL.RETINANET.PRIOR_PROB
        bias_value = -math.log((1 - prior_prob) / prior_prob)
        torch.nn.init.constant_(self.cls_logits.bias, bias_value)

    def forward(self, x):
        """Run the head over ``x`` (list of per-level FPN feature maps).

        Returns (logits, bbox_reg), each a list with one tensor per level.
        """
        logits = []
        bbox_reg = []
        for l, feature in enumerate(x):
            cls_out = feature
            reg_out = feature
            # obtain initial pools
            if self.has_split_weights:
                # compute cls_split_feats
                for ops in self._cls_head_split_ops[l]:
                    cls_out = ops(cls_out)
                # compute reg_split_feats
                for ops in self._reg_head_split_ops[l]:
                    reg_out = ops(reg_out)
            if self.has_shared_weights:
                # compute cls_global_feats
                for ops in self._cls_head_global_ops:
                    cls_out = ops(cls_out)
                # compute reg_global_feats
                for ops in self._reg_head_global_ops:
                    reg_out = ops(reg_out)
            logits.append(self.cls_logits(cls_out))
            bbox_reg.append(self.bbox_pred(reg_out))
        return logits, bbox_reg
| 37.01444
| 92
| 0.596411
| 1,297
| 10,253
| 4.365459
| 0.11488
| 0.027199
| 0.04804
| 0.051925
| 0.848817
| 0.829566
| 0.829566
| 0.816849
| 0.816849
| 0.803073
| 0
| 0.008838
| 0.326831
| 10,253
| 277
| 93
| 37.01444
| 0.811504
| 0.086219
| 0
| 0.751323
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.021164
| 1
| 0.021164
| false
| 0
| 0.042328
| 0
| 0.084656
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
86d473a44d208ad792cadffca97ef0846a555be9
| 6,907
|
py
|
Python
|
problems/the_travelling_salesman_problem.py
|
eppica/Simplex-Method
|
02f2c9da195993905ae4480c3048b43cf0e76fc7
|
[
"MIT"
] | null | null | null |
problems/the_travelling_salesman_problem.py
|
eppica/Simplex-Method
|
02f2c9da195993905ae4480c3048b43cf0e76fc7
|
[
"MIT"
] | 4
|
2021-03-21T21:52:41.000Z
|
2021-03-21T22:03:11.000Z
|
problems/the_travelling_salesman_problem.py
|
eppica/Simplex-Method
|
02f2c9da195993905ae4480c3048b43cf0e76fc7
|
[
"MIT"
] | null | null | null |
# Linear-programming (simplex) data for a 5-city travelling-salesman instance.
# Big-M penalty applied to artificial variables in the objective.
M = 9999
# Decision variables: x_ij = arc from city i to city j, f* = slack/surplus
# variables, a* = artificial variables (one per equality constraint).
base_variables = ['x12', 'x13', 'x14', 'x15', 'x21', 'x23', 'x24', 'x25', 'x31', 'x32', 'x34', 'x35', 'x41', 'x42', 'x43', 'x45', 'x51', 'x52', 'x53', 'x54', 'f1', 'f2', 'f3', 'f4', 'f5', 'f6', 'f7', 'f8', 'f9', 'f10', 'f11', 'f12', 'f13', 'f14', 'f15', 'f16', 'f17', 'f18', 'f19', 'f20', 'f21', 'f22', 'f23', 'f24', 'f25', 'a1', 'a2', 'a3', 'a4', 'a5', 'a6', 'a7', 'a8', 'a9', 'a10']
# Variables not in the initial basis (slack + artificial variables).
non_base_variables = ['f1', 'f2', 'f3', 'f4', 'f5', 'f6', 'f7', 'f8', 'f9', 'f10', 'f11', 'f12', 'f13', 'f14', 'f15', 'f16', 'f17', 'f18', 'f19', 'f20', 'f21', 'f22', 'f23', 'f24', 'f25', 'a1', 'a2', 'a3', 'a4', 'a5', 'a6', 'a7', 'a8', 'a9', 'a10']
# Constraint coefficient matrix: one row per constraint, one column per
# variable in base_variables (35 rows x 55 columns).
matrix = [
[1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0],
[1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0],
[0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0],
[0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0],
[0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1]]
# Right-hand side of each constraint (same order as matrix rows).
independent_terms = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
# Objective coefficients: arc travel costs for the x_ij variables, zero for
# slack variables, big-M for artificial variables.
objective = [63.6, 53.2, 90.4, 159.4, 63.6, 90.8, 106.8, 169.4, 53.2, 90.8, 135.4, 157.6, 90.4, 106.8, 135.4, 125.4, 159.4, 169.4, 157.6, 125.4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, M, M, M, M, M, M, M, M, M, M]
# Simplex working state.
total = 0
quotients = []
| 135.431373
| 384
| 0.345302
| 2,139
| 6,907
| 1.113137
| 0.038336
| 1.409492
| 1.959261
| 2.410752
| 0.908862
| 0.908862
| 0.908862
| 0.908862
| 0.908862
| 0.900462
| 0
| 0.46199
| 0.308672
| 6,907
| 50
| 385
| 138.14
| 0.036649
| 0
| 0
| 0
| 0
| 0
| 0.033879
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 13
|
86e796167486656a3c752d113deb6881f8b9bf52
| 1,613
|
py
|
Python
|
test/functional/test_evaluator.py
|
fedden/pluribus
|
73fb394b26623c897459ffa3e66d7a5cb47e9962
|
[
"MIT"
] | 2
|
2020-01-12T07:59:56.000Z
|
2020-01-13T10:04:26.000Z
|
test/functional/test_evaluator.py
|
fedden/pluribus
|
73fb394b26623c897459ffa3e66d7a5cb47e9962
|
[
"MIT"
] | null | null | null |
test/functional/test_evaluator.py
|
fedden/pluribus
|
73fb394b26623c897459ffa3e66d7a5cb47e9962
|
[
"MIT"
] | null | null | null |
import pytest
@pytest.mark.parametrize("suit", ["c", "d", "h", "s"])
def test_evaluator_royal_flush(suit):
    """A royal flush in any suit must evaluate as the best possible hand.

    The evaluator uses rank 1 for the strongest hand and classifies a
    royal flush under hand class 1 ("straight flush").
    """
    from poker_ai.poker.evaluation.eval_card import EvaluationCard
    from poker_ai.poker.evaluation.evaluator import Evaluator
    board = [
        EvaluationCard.new(f"A{suit}"),
        EvaluationCard.new(f"K{suit}"),
        EvaluationCard.new(f"Q{suit}"),
    ]
    hand = [EvaluationCard.new(f"J{suit}"), EvaluationCard.new(f"T{suit}")]
    evaluator = Evaluator()
    rank = evaluator.evaluate(board, hand)
    hand_class_int = evaluator.get_rank_class(rank)
    hand_class_str = evaluator.class_to_string(hand_class_int).lower()
    # Plain asserts: pytest's assertion introspection shows the offending
    # value on failure, unlike the original bare `raise ValueError`.
    assert hand_class_int == 1
    assert hand_class_str == "straight flush"
    assert rank == 1
@pytest.mark.parametrize("suit", ["c", "d", "h", "s"])
def test_evaluator_straight_flush(suit):
    """A king-high straight flush must rank just below the royal flush.

    The 9-K-Q-J-T same-suit hand is the second-strongest hand overall,
    so the evaluator must report rank 2 and hand class 1
    ("straight flush").
    """
    from poker_ai.poker.evaluation.eval_card import EvaluationCard
    from poker_ai.poker.evaluation.evaluator import Evaluator
    board = [
        EvaluationCard.new(f"9{suit}"),
        EvaluationCard.new(f"K{suit}"),
        EvaluationCard.new(f"Q{suit}"),
    ]
    hand = [EvaluationCard.new(f"J{suit}"), EvaluationCard.new(f"T{suit}")]
    evaluator = Evaluator()
    rank = evaluator.evaluate(board, hand)
    hand_class_int = evaluator.get_rank_class(rank)
    hand_class_str = evaluator.class_to_string(hand_class_int).lower()
    # Plain asserts: pytest's assertion introspection shows the offending
    # value on failure, unlike the original bare `raise ValueError`.
    assert hand_class_int == 1
    assert hand_class_str == "straight flush"
    assert rank == 2
| 35.065217
| 75
| 0.675139
| 210
| 1,613
| 4.995238
| 0.219048
| 0.162059
| 0.171592
| 0.125834
| 0.943756
| 0.943756
| 0.943756
| 0.943756
| 0.943756
| 0.943756
| 0
| 0.003849
| 0.194668
| 1,613
| 45
| 76
| 35.844444
| 0.803695
| 0
| 0
| 0.780488
| 0
| 0
| 0.070676
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.04878
| false
| 0
| 0.121951
| 0
| 0.170732
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
86faedc7afbce6caf1574da895be7b0aefe43b33
| 202,548
|
py
|
Python
|
huaweicloud-sdk-rds/huaweicloudsdkrds/v3/rds_client.py
|
huaweicloud/huaweicloud-sdk-python-v3
|
7a6270390fcbf192b3882bf763e7016e6026ef78
|
[
"Apache-2.0"
] | 64
|
2020-06-12T07:05:07.000Z
|
2022-03-30T03:32:50.000Z
|
huaweicloud-sdk-rds/huaweicloudsdkrds/v3/rds_client.py
|
huaweicloud/huaweicloud-sdk-python-v3
|
7a6270390fcbf192b3882bf763e7016e6026ef78
|
[
"Apache-2.0"
] | 11
|
2020-07-06T07:56:54.000Z
|
2022-01-11T11:14:40.000Z
|
huaweicloud-sdk-rds/huaweicloudsdkrds/v3/rds_client.py
|
huaweicloud/huaweicloud-sdk-python-v3
|
7a6270390fcbf192b3882bf763e7016e6026ef78
|
[
"Apache-2.0"
] | 24
|
2020-06-08T11:42:13.000Z
|
2022-03-04T06:44:08.000Z
|
# coding: utf-8
from __future__ import absolute_import
import datetime
import re
import importlib
import six
from huaweicloudsdkcore.client import Client, ClientBuilder
from huaweicloudsdkcore.exceptions import exceptions
from huaweicloudsdkcore.utils import http_utils
from huaweicloudsdkcore.sdk_stream_request import SdkStreamRequest
class RdsClient(Client):
"""
:param configuration: .Configuration object for this client
:param pool_threads: The number of threads to use for async requests
to the API. More threads means more concurrent API requests.
"""
# Types the SDK's (de)serialization layer treats as atomic scalar values.
PRIMITIVE_TYPES = (float, bool, bytes, six.text_type) + six.integer_types
# Mapping from OpenAPI schema type names to concrete Python types.
# NOTE: `long` only exists on Python 2; the `six.PY3` check short-circuits
# before the bare name `long` would be evaluated, keeping this import-safe
# on Python 3.
NATIVE_TYPES_MAPPING = {
    'int': int,
    'long': int if six.PY3 else long,
    'float': float,
    'str': str,
    'bool': bool,
    'date': datetime.date,
    'datetime': datetime.datetime,
    'object': object,
}
def __init__(self):
    """Initialize the RDS v3 client.

    Sets up the base ``Client`` state, binds the v3 model package (model
    classes are resolved from it dynamically during response
    deserialization), and installs the default User-Agent header.
    """
    super(RdsClient, self).__init__()
    self.model_package = importlib.import_module("huaweicloudsdkrds.v3.model")
    self.preset_headers = {'User-Agent': 'HuaweiCloud-SDK-Python'}

@classmethod
def new_builder(cls, clazz=None):
    """Return a ``ClientBuilder`` for constructing an ``RdsClient``.

    :param clazz: optional client class; must be ``RdsClient`` itself.
        Defaults to this class when omitted.
    :raises TypeError: if ``clazz`` is given but is not ``RdsClient``.
    """
    if clazz is None:
        return ClientBuilder(cls)
    if clazz.__name__ != "RdsClient":
        raise TypeError("client type error, support client type is RdsClient")
    return ClientBuilder(clazz)
def attach_eip(self, request):
"""绑定和解绑弹性公网IP
绑定和解绑弹性公网IP。
:param AttachEipRequest request
:return: AttachEipResponse
"""
return self.attach_eip_with_http_info(request)
def attach_eip_with_http_info(self, request):
"""绑定和解绑弹性公网IP
绑定和解绑弹性公网IP。
:param AttachEipRequest request
:return: AttachEipResponse
"""
all_params = ['instance_id', 'bind_eip_request', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/public-ip',
method='PUT',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='AttachEipResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def batch_tag_add_action(self, request):
"""批量添加标签
批量添加标签。
:param BatchTagAddActionRequest request
:return: BatchTagAddActionResponse
"""
return self.batch_tag_add_action_with_http_info(request)
def batch_tag_add_action_with_http_info(self, request):
"""批量添加标签
批量添加标签。
:param BatchTagAddActionRequest request
:return: BatchTagAddActionResponse
"""
all_params = ['instance_id', 'batch_tag_action_request', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/tags/action',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='BatchTagAddActionResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def batch_tag_del_action(self, request):
"""批量删除标签
批量删除标签。
:param BatchTagDelActionRequest request
:return: BatchTagDelActionResponse
"""
return self.batch_tag_del_action_with_http_info(request)
def batch_tag_del_action_with_http_info(self, request):
"""批量删除标签
批量删除标签。
:param BatchTagDelActionRequest request
:return: BatchTagDelActionResponse
"""
all_params = ['instance_id', 'batch_tag_action_request', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/tags/action',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='BatchTagDelActionResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def change_failover_mode(self, request):
"""更改主备实例的数据同步方式
更改主备实例的数据同步方式。
:param ChangeFailoverModeRequest request
:return: ChangeFailoverModeResponse
"""
return self.change_failover_mode_with_http_info(request)
def change_failover_mode_with_http_info(self, request):
"""更改主备实例的数据同步方式
更改主备实例的数据同步方式。
:param ChangeFailoverModeRequest request
:return: ChangeFailoverModeResponse
"""
all_params = ['instance_id', 'failover_mode_request_body', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/failover/mode',
method='PUT',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ChangeFailoverModeResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def change_failover_strategy(self, request):
"""切换主备实例的倒换策略
切换主备实例的倒换策略.
:param ChangeFailoverStrategyRequest request
:return: ChangeFailoverStrategyResponse
"""
return self.change_failover_strategy_with_http_info(request)
def change_failover_strategy_with_http_info(self, request):
"""切换主备实例的倒换策略
切换主备实例的倒换策略.
:param ChangeFailoverStrategyRequest request
:return: ChangeFailoverStrategyResponse
"""
all_params = ['instance_id', 'failover_strategy_request_body', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/failover/strategy',
method='PUT',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ChangeFailoverStrategyResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def change_ops_window(self, request):
"""设置可维护时间段
设置可维护时间段
:param ChangeOpsWindowRequest request
:return: ChangeOpsWindowResponse
"""
return self.change_ops_window_with_http_info(request)
def change_ops_window_with_http_info(self, request):
"""设置可维护时间段
设置可维护时间段
:param ChangeOpsWindowRequest request
:return: ChangeOpsWindowResponse
"""
all_params = ['instance_id', 'ops_window_request_body', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/ops-window',
method='PUT',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ChangeOpsWindowResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def create_configuration(self, request):
"""创建参数模板
创建参数模板。
:param CreateConfigurationRequest request
:return: CreateConfigurationResponse
"""
return self.create_configuration_with_http_info(request)
def create_configuration_with_http_info(self, request):
"""创建参数模板
创建参数模板。
:param CreateConfigurationRequest request
:return: CreateConfigurationResponse
"""
all_params = ['create_configuration_request_body', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/configurations',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='CreateConfigurationResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def create_dns_name(self, request):
"""申请域名
申请域名
:param CreateDnsNameRequest request
:return: CreateDnsNameResponse
"""
return self.create_dns_name_with_http_info(request)
def create_dns_name_with_http_info(self, request):
"""申请域名
申请域名
:param CreateDnsNameRequest request
:return: CreateDnsNameResponse
"""
all_params = ['instance_id', 'create_dns_name_request_body', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/create-dns',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='CreateDnsNameResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def create_instance(self, request):
"""创建数据库实例
创建数据库实例。
:param CreateInstanceRequest request
:return: CreateInstanceResponse
"""
return self.create_instance_with_http_info(request)
def create_instance_with_http_info(self, request):
"""创建数据库实例
创建数据库实例。
:param CreateInstanceRequest request
:return: CreateInstanceResponse
"""
all_params = ['create_instance_request_body', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='CreateInstanceResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def create_manual_backup(self, request):
"""创建手动备份
创建手动备份。
:param CreateManualBackupRequest request
:return: CreateManualBackupResponse
"""
return self.create_manual_backup_with_http_info(request)
def create_manual_backup_with_http_info(self, request):
"""创建手动备份
创建手动备份。
:param CreateManualBackupRequest request
:return: CreateManualBackupResponse
"""
all_params = ['create_manual_backup_request_body', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/backups',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='CreateManualBackupResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def create_restore_instance(self, request):
"""恢复到新实例
恢复到新实例。
:param CreateRestoreInstanceRequest request
:return: CreateRestoreInstanceResponse
"""
return self.create_restore_instance_with_http_info(request)
def create_restore_instance_with_http_info(self, request):
"""恢复到新实例
恢复到新实例。
:param CreateRestoreInstanceRequest request
:return: CreateRestoreInstanceResponse
"""
all_params = ['create_instance_request_body', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='CreateRestoreInstanceResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def delete_configuration(self, request):
"""删除参数模板
删除参数模板。
:param DeleteConfigurationRequest request
:return: DeleteConfigurationResponse
"""
return self.delete_configuration_with_http_info(request)
def delete_configuration_with_http_info(self, request):
"""删除参数模板
删除参数模板。
:param DeleteConfigurationRequest request
:return: DeleteConfigurationResponse
"""
all_params = ['config_id', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'config_id' in local_var_params:
path_params['config_id'] = local_var_params['config_id']
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/configurations/{config_id}',
method='DELETE',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='DeleteConfigurationResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def delete_instance(self, request):
"""删除数据库实例
删除数据库实例。
:param DeleteInstanceRequest request
:return: DeleteInstanceResponse
"""
return self.delete_instance_with_http_info(request)
def delete_instance_with_http_info(self, request):
"""删除数据库实例
删除数据库实例。
:param DeleteInstanceRequest request
:return: DeleteInstanceResponse
"""
all_params = ['instance_id', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}',
method='DELETE',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='DeleteInstanceResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def delete_manual_backup(self, request):
"""删除手动备份
删除手动备份。
:param DeleteManualBackupRequest request
:return: DeleteManualBackupResponse
"""
return self.delete_manual_backup_with_http_info(request)
def delete_manual_backup_with_http_info(self, request):
"""删除手动备份
删除手动备份。
:param DeleteManualBackupRequest request
:return: DeleteManualBackupResponse
"""
all_params = ['backup_id', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'backup_id' in local_var_params:
path_params['backup_id'] = local_var_params['backup_id']
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/backups/{backup_id}',
method='DELETE',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='DeleteManualBackupResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def download_slowlog(self, request):
"""获取慢日志下载链接
获取慢日志下载链接。
:param DownloadSlowlogRequest request
:return: DownloadSlowlogResponse
"""
return self.download_slowlog_with_http_info(request)
def download_slowlog_with_http_info(self, request):
"""获取慢日志下载链接
获取慢日志下载链接。
:param DownloadSlowlogRequest request
:return: DownloadSlowlogResponse
"""
all_params = ['instance_id', 'slowlog_download_request_body', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/slowlog-download',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='DownloadSlowlogResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def enable_configuration(self, request):
"""应用参数模板
应用参数模板。
:param EnableConfigurationRequest request
:return: EnableConfigurationResponse
"""
return self.enable_configuration_with_http_info(request)
def enable_configuration_with_http_info(self, request):
"""应用参数模板
应用参数模板。
:param EnableConfigurationRequest request
:return: EnableConfigurationResponse
"""
all_params = ['config_id', 'apply_configuration_request', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'config_id' in local_var_params:
path_params['config_id'] = local_var_params['config_id']
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/configurations/{config_id}/apply',
method='PUT',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='EnableConfigurationResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def list_auditlogs(self, request):
"""获取审计日志列表
获取审计日志列表。
:param ListAuditlogsRequest request
:return: ListAuditlogsResponse
"""
return self.list_auditlogs_with_http_info(request)
def list_auditlogs_with_http_info(self, request):
"""获取审计日志列表
获取审计日志列表。
:param ListAuditlogsRequest request
:return: ListAuditlogsResponse
"""
all_params = ['instance_id', 'start_time', 'end_time', 'offset', 'limit', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
if 'start_time' in local_var_params:
query_params.append(('start_time', local_var_params['start_time']))
if 'end_time' in local_var_params:
query_params.append(('end_time', local_var_params['end_time']))
if 'offset' in local_var_params:
query_params.append(('offset', local_var_params['offset']))
if 'limit' in local_var_params:
query_params.append(('limit', local_var_params['limit']))
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/auditlog',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ListAuditlogsResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def list_backups(self, request):
"""获取备份列表
获取备份列表。
:param ListBackupsRequest request
:return: ListBackupsResponse
"""
return self.list_backups_with_http_info(request)
def list_backups_with_http_info(self, request):
"""获取备份列表
获取备份列表。
:param ListBackupsRequest request
:return: ListBackupsResponse
"""
all_params = ['instance_id', 'x_language', 'backup_id', 'backup_type', 'offset', 'limit', 'begin_time', 'end_time']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
query_params = []
if 'instance_id' in local_var_params:
query_params.append(('instance_id', local_var_params['instance_id']))
if 'backup_id' in local_var_params:
query_params.append(('backup_id', local_var_params['backup_id']))
if 'backup_type' in local_var_params:
query_params.append(('backup_type', local_var_params['backup_type']))
if 'offset' in local_var_params:
query_params.append(('offset', local_var_params['offset']))
if 'limit' in local_var_params:
query_params.append(('limit', local_var_params['limit']))
if 'begin_time' in local_var_params:
query_params.append(('begin_time', local_var_params['begin_time']))
if 'end_time' in local_var_params:
query_params.append(('end_time', local_var_params['end_time']))
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/backups',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ListBackupsResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def list_collations(self, request):
    """Query the character sets available for SQL Server.

    :param ListCollationsRequest request: request object
    :return: ListCollationsResponse
    """
    return self.list_collations_with_http_info(request)

def list_collations_with_http_info(self, request):
    """Query the character sets available for SQL Server (full HTTP variant).

    :param ListCollationsRequest request: request object
    :return: ListCollationsResponse
    """
    # Collect only the attributes actually set on the request object.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}
    path_params = {}
    query_params = []

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    # Streaming requests carry their payload as a file stream.
    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    return self.call_api(
        resource_path='/v3/{project_id}/collations',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params={},
        response_type='ListCollationsResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_configurations(self, request):
    """Obtain the parameter template list.

    Includes the default parameter templates of all database engines and
    any user-created templates.

    :param ListConfigurationsRequest request: request object
    :return: ListConfigurationsResponse
    """
    return self.list_configurations_with_http_info(request)

def list_configurations_with_http_info(self, request):
    """Obtain the parameter template list (full HTTP variant).

    :param ListConfigurationsRequest request: request object
    :return: ListConfigurationsResponse
    """
    # Collect only the attributes actually set on the request object.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}
    path_params = {}
    query_params = []

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    # Streaming requests carry their payload as a file stream.
    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    return self.call_api(
        resource_path='/v3/{project_id}/configurations',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params={},
        response_type='ListConfigurationsResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_datastores(self, request):
    """Query the available versions of a database engine.

    :param ListDatastoresRequest request: request object
    :return: ListDatastoresResponse
    """
    return self.list_datastores_with_http_info(request)

def list_datastores_with_http_info(self, request):
    """Query the available versions of a database engine (full HTTP variant).

    :param ListDatastoresRequest request: request object
    :return: ListDatastoresResponse
    """
    # Collect only the attributes actually set on the request object.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}

    path_params = {}
    if 'database_name' in local_var_params:
        path_params['database_name'] = local_var_params['database_name']

    query_params = []

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    # Streaming requests carry their payload as a file stream.
    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    return self.call_api(
        resource_path='/v3/{project_id}/datastores/{database_name}',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params={},
        response_type='ListDatastoresResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_error_logs(self, request):
    """Query database error logs.

    :param ListErrorLogsRequest request: request object
    :return: ListErrorLogsResponse
    """
    return self.list_error_logs_with_http_info(request)

def list_error_logs_with_http_info(self, request):
    """Query database error logs (full HTTP variant).

    :param ListErrorLogsRequest request: request object
    :return: ListErrorLogsResponse
    """
    # Collect only the attributes actually set on the request object.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}

    path_params = {}
    if 'instance_id' in local_var_params:
        path_params['instance_id'] = local_var_params['instance_id']

    # Preserve the generated parameter order in the query string.
    query_params = [(name, local_var_params[name])
                    for name in ('start_date', 'end_date', 'offset',
                                 'limit', 'level')
                    if name in local_var_params]

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    # Streaming requests carry their payload as a file stream.
    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    return self.call_api(
        resource_path='/v3/{project_id}/instances/{instance_id}/errorlog',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params={},
        response_type='ListErrorLogsResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_flavors(self, request):
    """Query database specifications (flavors).

    :param ListFlavorsRequest request: request object
    :return: ListFlavorsResponse
    """
    return self.list_flavors_with_http_info(request)

def list_flavors_with_http_info(self, request):
    """Query database specifications (full HTTP variant).

    :param ListFlavorsRequest request: request object
    :return: ListFlavorsResponse
    """
    # Collect only the attributes actually set on the request object.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}

    path_params = {}
    if 'database_name' in local_var_params:
        path_params['database_name'] = local_var_params['database_name']

    # Preserve the generated parameter order in the query string.
    query_params = [(name, local_var_params[name])
                    for name in ('version_name', 'spec_code')
                    if name in local_var_params]

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    # Streaming requests carry their payload as a file stream.
    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    return self.call_api(
        resource_path='/v3/{project_id}/flavors/{database_name}',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params={},
        response_type='ListFlavorsResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_instances(self, request):
    """Query the database instance list.

    :param ListInstancesRequest request: request object
    :return: ListInstancesResponse
    """
    return self.list_instances_with_http_info(request)

def list_instances_with_http_info(self, request):
    """Query the database instance list (full HTTP variant).

    :param ListInstancesRequest request: request object
    :return: ListInstancesResponse
    """
    # Collect only the attributes actually set on the request object.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}
    path_params = {}

    # Preserve the generated parameter order in the query string.
    query_params = [(name, local_var_params[name])
                    for name in ('id', 'name', 'type', 'datastore_type',
                                 'vpc_id', 'subnet_id', 'offset', 'limit',
                                 'tags')
                    if name in local_var_params]

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    # Streaming requests carry their payload as a file stream.
    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    return self.call_api(
        resource_path='/v3/{project_id}/instances',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params={},
        response_type='ListInstancesResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_job_info(self, request):
    """Obtain information about the task with the given ID.

    :param ListJobInfoRequest request: request object
    :return: ListJobInfoResponse
    """
    return self.list_job_info_with_http_info(request)

def list_job_info_with_http_info(self, request):
    """Obtain information about the task with the given ID (full HTTP variant).

    :param ListJobInfoRequest request: request object
    :return: ListJobInfoResponse
    """
    # Collect only the attributes actually set on the request object.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}
    path_params = {}

    query_params = []
    if 'id' in local_var_params:
        query_params.append(('id', local_var_params['id']))

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    # Streaming requests carry their payload as a file stream.
    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    return self.call_api(
        resource_path='/v3/{project_id}/jobs',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params={},
        response_type='ListJobInfoResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_job_info_detail(self, request):
    """Obtain task information for an instance within a time range (SQL Server).

    :param ListJobInfoDetailRequest request: request object
    :return: ListJobInfoDetailResponse
    """
    return self.list_job_info_detail_with_http_info(request)

def list_job_info_detail_with_http_info(self, request):
    """Obtain task information for an instance within a time range (full HTTP variant).

    :param ListJobInfoDetailRequest request: request object
    :return: ListJobInfoDetailResponse
    """
    # Collect only the attributes actually set on the request object.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}

    path_params = {}
    if 'instance_id' in local_var_params:
        path_params['instance_id'] = local_var_params['instance_id']

    # Preserve the generated parameter order in the query string.
    query_params = [(name, local_var_params[name])
                    for name in ('start_time', 'end_time')
                    if name in local_var_params]

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    # Streaming requests carry their payload as a file stream.
    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    return self.call_api(
        resource_path='/v3/{project_id}/instances/{instance_id}/tasklist/detail',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params={},
        response_type='ListJobInfoDetailResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_off_site_backups(self, request):
    """Query the cross-region backup list.

    :param ListOffSiteBackupsRequest request: request object
    :return: ListOffSiteBackupsResponse
    """
    return self.list_off_site_backups_with_http_info(request)

def list_off_site_backups_with_http_info(self, request):
    """Query the cross-region backup list (full HTTP variant).

    :param ListOffSiteBackupsRequest request: request object
    :return: ListOffSiteBackupsResponse
    """
    # Collect only the attributes actually set on the request object.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}
    path_params = {}

    # Preserve the generated parameter order in the query string.
    query_params = [(name, local_var_params[name])
                    for name in ('instance_id', 'backup_id', 'backup_type',
                                 'offset', 'limit', 'begin_time', 'end_time')
                    if name in local_var_params]

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    # Streaming requests carry their payload as a file stream.
    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    return self.call_api(
        resource_path='/v3/{project_id}/offsite-backups',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params={},
        response_type='ListOffSiteBackupsResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_off_site_instances(self, request):
    """Query instances that have cross-region backups.

    :param ListOffSiteInstancesRequest request: request object
    :return: ListOffSiteInstancesResponse
    """
    return self.list_off_site_instances_with_http_info(request)

def list_off_site_instances_with_http_info(self, request):
    """Query instances that have cross-region backups (full HTTP variant).

    :param ListOffSiteInstancesRequest request: request object
    :return: ListOffSiteInstancesResponse
    """
    # Collect only the attributes actually set on the request object.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}
    path_params = {}

    # Preserve the generated parameter order in the query string.
    query_params = [(name, local_var_params[name])
                    for name in ('offset', 'limit')
                    if name in local_var_params]

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    # Streaming requests carry their payload as a file stream.
    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    return self.call_api(
        resource_path='/v3/backups/offsite-backup-instance',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params={},
        response_type='ListOffSiteInstancesResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_off_site_restore_times(self, request):
    """Query restorable time ranges for cross-region backups.

    If the retention period in your backup policy is long, it is
    recommended to pass the query date ``date``.

    :param ListOffSiteRestoreTimesRequest request: request object
    :return: ListOffSiteRestoreTimesResponse
    """
    return self.list_off_site_restore_times_with_http_info(request)

def list_off_site_restore_times_with_http_info(self, request):
    """Query restorable time ranges for cross-region backups (full HTTP variant).

    :param ListOffSiteRestoreTimesRequest request: request object
    :return: ListOffSiteRestoreTimesResponse
    """
    # Collect only the attributes actually set on the request object.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}

    path_params = {}
    if 'instance_id' in local_var_params:
        path_params['instance_id'] = local_var_params['instance_id']

    query_params = []
    if 'date' in local_var_params:
        query_params.append(('date', local_var_params['date']))

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    # Streaming requests carry their payload as a file stream.
    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    return self.call_api(
        resource_path='/v3/{project_id}/instances/{instance_id}/offsite-restore-time',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params={},
        response_type='ListOffSiteRestoreTimesResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_project_tags(self, request):
    """Query project tags.

    :param ListProjectTagsRequest request: request object
    :return: ListProjectTagsResponse
    """
    return self.list_project_tags_with_http_info(request)

def list_project_tags_with_http_info(self, request):
    """Query project tags (full HTTP variant).

    :param ListProjectTagsRequest request: request object
    :return: ListProjectTagsResponse
    """
    # Collect only the attributes actually set on the request object.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}
    path_params = {}
    query_params = []

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    # Streaming requests carry their payload as a file stream.
    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    return self.call_api(
        resource_path='/v3/{project_id}/tags',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params={},
        response_type='ListProjectTagsResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_restore_times(self, request):
    """Query restorable time ranges.

    If the retention period in your backup policy is long, it is
    recommended to pass the query date ``date``.

    :param ListRestoreTimesRequest request: request object
    :return: ListRestoreTimesResponse
    """
    return self.list_restore_times_with_http_info(request)

def list_restore_times_with_http_info(self, request):
    """Query restorable time ranges (full HTTP variant).

    :param ListRestoreTimesRequest request: request object
    :return: ListRestoreTimesResponse
    """
    # Collect only the attributes actually set on the request object.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}

    path_params = {}
    if 'instance_id' in local_var_params:
        path_params['instance_id'] = local_var_params['instance_id']

    query_params = []
    if 'date' in local_var_params:
        query_params.append(('date', local_var_params['date']))

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    # Streaming requests carry their payload as a file stream.
    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    return self.call_api(
        resource_path='/v3/{project_id}/instances/{instance_id}/restore-time',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params={},
        response_type='ListRestoreTimesResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_slow_logs(self, request):
    """Query database slow logs.

    :param ListSlowLogsRequest request: request object
    :return: ListSlowLogsResponse
    """
    return self.list_slow_logs_with_http_info(request)

def list_slow_logs_with_http_info(self, request):
    """Query database slow logs (full HTTP variant).

    :param ListSlowLogsRequest request: request object
    :return: ListSlowLogsResponse
    """
    # Collect only the attributes actually set on the request object.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}

    path_params = {}
    if 'instance_id' in local_var_params:
        path_params['instance_id'] = local_var_params['instance_id']

    # Preserve the generated parameter order in the query string.
    query_params = [(name, local_var_params[name])
                    for name in ('start_date', 'end_date', 'offset',
                                 'limit', 'type')
                    if name in local_var_params]

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    # Streaming requests carry their payload as a file stream.
    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    return self.call_api(
        resource_path='/v3/{project_id}/instances/{instance_id}/slowlog',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params={},
        response_type='ListSlowLogsResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_slowlog_statistics(self, request):
    """Obtain slow-log statistics.

    :param ListSlowlogStatisticsRequest request: request object
    :return: ListSlowlogStatisticsResponse
    """
    return self.list_slowlog_statistics_with_http_info(request)

def list_slowlog_statistics_with_http_info(self, request):
    """Obtain slow-log statistics (full HTTP variant).

    :param ListSlowlogStatisticsRequest request: request object
    :return: ListSlowlogStatisticsResponse
    """
    # Collect only the attributes actually set on the request object.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}

    path_params = {}
    if 'instance_id' in local_var_params:
        path_params['instance_id'] = local_var_params['instance_id']

    # Preserve the generated parameter order in the query string.
    query_params = [(name, local_var_params[name])
                    for name in ('cur_page', 'per_page', 'start_date',
                                 'end_date', 'type')
                    if name in local_var_params]

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    # Streaming requests carry their payload as a file stream.
    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    return self.call_api(
        resource_path='/v3/{project_id}/instances/{instance_id}/slowlog/statistics',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params={},
        response_type='ListSlowlogStatisticsResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_storage_types(self, request):
    """Query database disk (storage) types.

    :param ListStorageTypesRequest request: request object
    :return: ListStorageTypesResponse
    """
    return self.list_storage_types_with_http_info(request)

def list_storage_types_with_http_info(self, request):
    """Query database disk (storage) types (full HTTP variant).

    :param ListStorageTypesRequest request: request object
    :return: ListStorageTypesResponse
    """
    # Collect only the attributes actually set on the request object.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}

    path_params = {}
    if 'database_name' in local_var_params:
        path_params['database_name'] = local_var_params['database_name']

    # Preserve the generated parameter order in the query string.
    query_params = [(name, local_var_params[name])
                    for name in ('version_name', 'ha_mode')
                    if name in local_var_params]

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    # Streaming requests carry their payload as a file stream.
    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    return self.call_api(
        resource_path='/v3/{project_id}/storage-type/{database_name}',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params={},
        response_type='ListStorageTypesResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def migrate_follower(self, request):
    """Migrate the standby node of a primary/standby instance.

    :param MigrateFollowerRequest request: request object
    :return: MigrateFollowerResponse
    """
    return self.migrate_follower_with_http_info(request)

def migrate_follower_with_http_info(self, request):
    """Migrate the standby node of a primary/standby instance (full HTTP variant).

    :param MigrateFollowerRequest request: request object
    :return: MigrateFollowerResponse
    """
    # Collect only the attributes actually set on the request object.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}

    path_params = {}
    if 'instance_id' in local_var_params:
        path_params['instance_id'] = local_var_params['instance_id']

    query_params = []

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    # JSON body by default; streaming requests override it with a file stream.
    body_params = local_var_params.get('body')
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    return self.call_api(
        resource_path='/v3/{project_id}/instances/{instance_id}/migrateslave',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params={},
        response_type='MigrateFollowerResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def restore_tables(self, request):
    """Table-level point-in-time recovery (MySQL).

    :param RestoreTablesRequest request: request object
    :return: RestoreTablesResponse
    """
    return self.restore_tables_with_http_info(request)

def restore_tables_with_http_info(self, request):
    """Table-level point-in-time recovery for MySQL (full HTTP variant).

    :param RestoreTablesRequest request: request object
    :return: RestoreTablesResponse
    """
    # Collect only the attributes actually set on the request object.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}

    path_params = {}
    if 'instance_id' in local_var_params:
        path_params['instance_id'] = local_var_params['instance_id']

    query_params = []

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    # JSON body by default; streaming requests override it with a file stream.
    body_params = local_var_params.get('body')
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    return self.call_api(
        resource_path='/v3/{project_id}/instances/{instance_id}/restore/tables',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params={},
        response_type='RestoreTablesResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def restore_to_existing_instance(self, request):
    """Restore to an existing instance.

    :param RestoreToExistingInstanceRequest request: request object
    :return: RestoreToExistingInstanceResponse
    """
    return self.restore_to_existing_instance_with_http_info(request)

def restore_to_existing_instance_with_http_info(self, request):
    """Restore to an existing instance (full HTTP variant).

    :param RestoreToExistingInstanceRequest request: request object
    :return: RestoreToExistingInstanceResponse
    """
    # Collect only the attributes actually set on the request object.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}
    path_params = {}
    query_params = []

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    # JSON body by default; streaming requests override it with a file stream.
    body_params = local_var_params.get('body')
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    return self.call_api(
        resource_path='/v3/{project_id}/instances/recovery',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params={},
        response_type='RestoreToExistingInstanceResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def set_auditlog_policy(self, request):
"""设置审计日志策略
设置审计日志策略。
:param SetAuditlogPolicyRequest request
:return: SetAuditlogPolicyResponse
"""
return self.set_auditlog_policy_with_http_info(request)
def set_auditlog_policy_with_http_info(self, request):
    """Set the audit log policy.

    Sets the audit log policy.

    :param SetAuditlogPolicyRequest request
    :return: SetAuditlogPolicyResponse
    """
    # Copy only the attributes that are actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}
    if 'instance_id' in local_var_params:
        path_params['instance_id'] = local_var_params['instance_id']

    query_params = []

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']

    form_params = {}

    body_params = None
    if 'body' in local_var_params:
        body_params = local_var_params['body']
    # A streaming request replaces the JSON body with its file stream.
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v3/{project_id}/instances/{instance_id}/auditlog-policy',
        method='PUT',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='SetAuditlogPolicyResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def set_backup_policy(self, request):
    """Set the automated backup policy.

    Sets the automated backup policy.

    :param SetBackupPolicyRequest request
    :return: SetBackupPolicyResponse
    """
    return self.set_backup_policy_with_http_info(request)
def set_backup_policy_with_http_info(self, request):
    """Set the automated backup policy.

    Sets the automated backup policy.

    :param SetBackupPolicyRequest request
    :return: SetBackupPolicyResponse
    """
    # Copy only the attributes that are actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}
    if 'instance_id' in local_var_params:
        path_params['instance_id'] = local_var_params['instance_id']

    query_params = []

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']

    form_params = {}

    body_params = None
    if 'body' in local_var_params:
        body_params = local_var_params['body']
    # A streaming request replaces the JSON body with its file stream.
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v3/{project_id}/instances/{instance_id}/backups/policy',
        method='PUT',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='SetBackupPolicyResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def set_binlog_clear_policy(self, request):
    """Set the local binlog retention period.

    Modifies the local binlog retention period of a specified instance.

    :param SetBinlogClearPolicyRequest request
    :return: SetBinlogClearPolicyResponse
    """
    return self.set_binlog_clear_policy_with_http_info(request)
def set_binlog_clear_policy_with_http_info(self, request):
    """Set the local binlog retention period.

    Modifies the local binlog retention period of a specified instance.

    :param SetBinlogClearPolicyRequest request
    :return: SetBinlogClearPolicyResponse
    """
    # Copy only the attributes that are actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}
    if 'instance_id' in local_var_params:
        path_params['instance_id'] = local_var_params['instance_id']

    query_params = []

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']

    form_params = {}

    body_params = None
    if 'body' in local_var_params:
        body_params = local_var_params['body']
    # A streaming request replaces the JSON body with its file stream.
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v3/{project_id}/instances/{instance_id}/binlog/clear-policy',
        method='PUT',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='SetBinlogClearPolicyResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def set_off_site_backup_policy(self, request):
    """Set the cross-region backup policy.

    Sets the cross-region backup policy.

    :param SetOffSiteBackupPolicyRequest request
    :return: SetOffSiteBackupPolicyResponse
    """
    return self.set_off_site_backup_policy_with_http_info(request)
def set_off_site_backup_policy_with_http_info(self, request):
    """Set the cross-region backup policy.

    Sets the cross-region backup policy.

    :param SetOffSiteBackupPolicyRequest request
    :return: SetOffSiteBackupPolicyResponse
    """
    # Copy only the attributes that are actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}
    if 'instance_id' in local_var_params:
        path_params['instance_id'] = local_var_params['instance_id']

    query_params = []

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']

    form_params = {}

    body_params = None
    if 'body' in local_var_params:
        body_params = local_var_params['body']
    # A streaming request replaces the JSON body with its file stream.
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v3/{project_id}/instances/{instance_id}/backups/offsite-policy',
        method='PUT',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='SetOffSiteBackupPolicyResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def set_security_group(self, request):
    """Change the security group.

    Changes the security group.

    :param SetSecurityGroupRequest request
    :return: SetSecurityGroupResponse
    """
    return self.set_security_group_with_http_info(request)
def set_security_group_with_http_info(self, request):
    """Change the security group.

    Changes the security group.

    :param SetSecurityGroupRequest request
    :return: SetSecurityGroupResponse
    """
    # Copy only the attributes that are actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}
    if 'instance_id' in local_var_params:
        path_params['instance_id'] = local_var_params['instance_id']

    query_params = []

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']

    form_params = {}

    body_params = None
    if 'body' in local_var_params:
        body_params = local_var_params['body']
    # A streaming request replaces the JSON body with its file stream.
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v3/{project_id}/instances/{instance_id}/security-group',
        method='PUT',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='SetSecurityGroupResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def show_auditlog_download_link(self, request):
    """Generate audit log download links.

    Generates audit log download links.

    :param ShowAuditlogDownloadLinkRequest request
    :return: ShowAuditlogDownloadLinkResponse
    """
    return self.show_auditlog_download_link_with_http_info(request)
def show_auditlog_download_link_with_http_info(self, request):
    """Generate audit log download links.

    Generates audit log download links.

    :param ShowAuditlogDownloadLinkRequest request
    :return: ShowAuditlogDownloadLinkResponse
    """
    # Copy only the attributes that are actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}
    if 'instance_id' in local_var_params:
        path_params['instance_id'] = local_var_params['instance_id']

    query_params = []

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']

    form_params = {}

    body_params = None
    if 'body' in local_var_params:
        body_params = local_var_params['body']
    # A streaming request replaces the JSON body with its file stream.
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v3/{project_id}/instances/{instance_id}/auditlog-links',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ShowAuditlogDownloadLinkResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def show_auditlog_policy(self, request):
    """Query the audit log policy.

    Queries the audit log policy.

    :param ShowAuditlogPolicyRequest request
    :return: ShowAuditlogPolicyResponse
    """
    return self.show_auditlog_policy_with_http_info(request)
def show_auditlog_policy_with_http_info(self, request):
    """Query the audit log policy.

    Queries the audit log policy.

    :param ShowAuditlogPolicyRequest request
    :return: ShowAuditlogPolicyResponse
    """
    # Copy only the attributes that are actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}
    if 'instance_id' in local_var_params:
        path_params['instance_id'] = local_var_params['instance_id']

    query_params = []

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']

    form_params = {}

    body_params = None
    # A streaming request carries its file stream as the body.
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v3/{project_id}/instances/{instance_id}/auditlog-policy',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ShowAuditlogPolicyResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def show_backup_download_link(self, request):
    """Obtain the backup download link.

    Obtains the backup download link.

    :param ShowBackupDownloadLinkRequest request
    :return: ShowBackupDownloadLinkResponse
    """
    return self.show_backup_download_link_with_http_info(request)
def show_backup_download_link_with_http_info(self, request):
    """Obtain the backup download link.

    Obtains the backup download link.

    :param ShowBackupDownloadLinkRequest request
    :return: ShowBackupDownloadLinkResponse
    """
    # Copy only the attributes that are actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}

    query_params = []
    if 'backup_id' in local_var_params:
        query_params.append(('backup_id', local_var_params['backup_id']))

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']

    form_params = {}

    body_params = None
    # A streaming request carries its file stream as the body.
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v3/{project_id}/backup-files',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ShowBackupDownloadLinkResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def show_backup_policy(self, request):
    """Query the automated backup policy.

    Queries the automated backup policy.

    :param ShowBackupPolicyRequest request
    :return: ShowBackupPolicyResponse
    """
    return self.show_backup_policy_with_http_info(request)
def show_backup_policy_with_http_info(self, request):
    """Query the automated backup policy.

    Queries the automated backup policy.

    :param ShowBackupPolicyRequest request
    :return: ShowBackupPolicyResponse
    """
    # Copy only the attributes that are actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}
    if 'instance_id' in local_var_params:
        path_params['instance_id'] = local_var_params['instance_id']

    query_params = []

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']

    form_params = {}

    body_params = None
    # A streaming request carries its file stream as the body.
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v3/{project_id}/instances/{instance_id}/backups/policy',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ShowBackupPolicyResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def show_binlog_clear_policy(self, request):
    """Obtain the local binlog retention period.

    Queries the local binlog retention period of a specified instance.

    :param ShowBinlogClearPolicyRequest request
    :return: ShowBinlogClearPolicyResponse
    """
    return self.show_binlog_clear_policy_with_http_info(request)
def show_binlog_clear_policy_with_http_info(self, request):
    """Obtain the local binlog retention period.

    Queries the local binlog retention period of a specified instance.

    :param ShowBinlogClearPolicyRequest request
    :return: ShowBinlogClearPolicyResponse
    """
    # Copy only the attributes that are actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}
    if 'instance_id' in local_var_params:
        path_params['instance_id'] = local_var_params['instance_id']

    query_params = []

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']

    form_params = {}

    body_params = None
    # A streaming request carries its file stream as the body.
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v3/{project_id}/instances/{instance_id}/binlog/clear-policy',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ShowBinlogClearPolicyResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def show_configuration(self, request):
    """Obtain parameters of a specified parameter template.

    Obtains parameters of a specified parameter template.

    :param ShowConfigurationRequest request
    :return: ShowConfigurationResponse
    """
    return self.show_configuration_with_http_info(request)
def show_configuration_with_http_info(self, request):
    """Obtain parameters of a specified parameter template.

    Obtains parameters of a specified parameter template.

    :param ShowConfigurationRequest request
    :return: ShowConfigurationResponse
    """
    # Copy only the attributes that are actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}
    if 'config_id' in local_var_params:
        path_params['config_id'] = local_var_params['config_id']

    query_params = []

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']

    form_params = {}

    body_params = None
    # A streaming request carries its file stream as the body.
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v3/{project_id}/configurations/{config_id}',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ShowConfigurationResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def show_instance_configuration(self, request):
    """Obtain the parameter template of a specified instance.

    Obtains the parameter template of a specified instance.

    :param ShowInstanceConfigurationRequest request
    :return: ShowInstanceConfigurationResponse
    """
    return self.show_instance_configuration_with_http_info(request)
def show_instance_configuration_with_http_info(self, request):
    """Obtain the parameter template of a specified instance.

    Obtains the parameter template of a specified instance.

    :param ShowInstanceConfigurationRequest request
    :return: ShowInstanceConfigurationResponse
    """
    # Copy only the attributes that are actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}
    if 'instance_id' in local_var_params:
        path_params['instance_id'] = local_var_params['instance_id']

    query_params = []

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']

    form_params = {}

    body_params = None
    # A streaming request carries its file stream as the body.
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v3/{project_id}/instances/{instance_id}/configurations',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ShowInstanceConfigurationResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def show_off_site_backup_policy(self, request):
    """Query the cross-region backup policy.

    Queries the cross-region backup policy.

    :param ShowOffSiteBackupPolicyRequest request
    :return: ShowOffSiteBackupPolicyResponse
    """
    return self.show_off_site_backup_policy_with_http_info(request)
def show_off_site_backup_policy_with_http_info(self, request):
    """Query the cross-region backup policy.

    Queries the cross-region backup policy.

    :param ShowOffSiteBackupPolicyRequest request
    :return: ShowOffSiteBackupPolicyResponse
    """
    # Copy only the attributes that are actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}
    if 'instance_id' in local_var_params:
        path_params['instance_id'] = local_var_params['instance_id']

    query_params = []

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']

    form_params = {}

    body_params = None
    # A streaming request carries its file stream as the body.
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v3/{project_id}/instances/{instance_id}/backups/offsite-policy',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ShowOffSiteBackupPolicyResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def show_quotas(self, request):
    """Query quotas.

    Queries resource quotas of the current project.

    :param ShowQuotasRequest request
    :return: ShowQuotasResponse
    """
    return self.show_quotas_with_http_info(request)
def show_quotas_with_http_info(self, request):
    """Query quotas.

    Queries resource quotas of the current project.

    :param ShowQuotasRequest request
    :return: ShowQuotasResponse
    """
    # Copy only the attributes that are actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']

    form_params = {}

    body_params = None
    # A streaming request carries its file stream as the body.
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v3/{project_id}/quotas',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ShowQuotasResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def start_failover(self, request):
    """Manually switch over primary and standby instances.

    Manually switches over primary and standby instances.

    :param StartFailoverRequest request
    :return: StartFailoverResponse
    """
    return self.start_failover_with_http_info(request)
def start_failover_with_http_info(self, request):
    """Manually switch over primary and standby instances.

    Manually switches over primary and standby instances.

    :param StartFailoverRequest request
    :return: StartFailoverResponse
    """
    # Copy only the attributes that are actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}
    if 'instance_id' in local_var_params:
        path_params['instance_id'] = local_var_params['instance_id']

    query_params = []

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']

    form_params = {}

    body_params = None
    # A streaming request carries its file stream as the body.
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v3/{project_id}/instances/{instance_id}/failover',
        method='PUT',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='StartFailoverResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def start_instance_enlarge_volume_action(self, request):
    """Scale up the storage space of a DB instance.

    Scales up the storage space of a DB instance.

    :param StartInstanceEnlargeVolumeActionRequest request
    :return: StartInstanceEnlargeVolumeActionResponse
    """
    return self.start_instance_enlarge_volume_action_with_http_info(request)
def start_instance_enlarge_volume_action_with_http_info(self, request):
    """Scale up the storage space of a DB instance.

    Scales up the storage space of a DB instance.

    :param StartInstanceEnlargeVolumeActionRequest request
    :return: StartInstanceEnlargeVolumeActionResponse
    """
    # Copy only the attributes that are actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}
    if 'instance_id' in local_var_params:
        path_params['instance_id'] = local_var_params['instance_id']

    query_params = []

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']

    form_params = {}

    body_params = None
    if 'body' in local_var_params:
        body_params = local_var_params['body']
    # A streaming request replaces the JSON body with its file stream.
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v3/{project_id}/instances/{instance_id}/action',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='StartInstanceEnlargeVolumeActionResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def start_instance_restart_action(self, request):
    """Restart a DB instance.

    Restarts a DB instance.

    :param StartInstanceRestartActionRequest request
    :return: StartInstanceRestartActionResponse
    """
    return self.start_instance_restart_action_with_http_info(request)
def start_instance_restart_action_with_http_info(self, request):
    """Restart a DB instance.

    Restarts a DB instance.

    :param StartInstanceRestartActionRequest request
    :return: StartInstanceRestartActionResponse
    """
    # Copy only the attributes that are actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}
    if 'instance_id' in local_var_params:
        path_params['instance_id'] = local_var_params['instance_id']

    query_params = []

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']

    form_params = {}

    body_params = None
    if 'body' in local_var_params:
        body_params = local_var_params['body']
    # A streaming request replaces the JSON body with its file stream.
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v3/{project_id}/instances/{instance_id}/action',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='StartInstanceRestartActionResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def start_instance_single_to_ha_action(self, request):
    """Change a single instance to a primary/standby instance.

    Changes a single instance to a primary/standby instance.

    :param StartInstanceSingleToHaActionRequest request
    :return: StartInstanceSingleToHaActionResponse
    """
    return self.start_instance_single_to_ha_action_with_http_info(request)
def start_instance_single_to_ha_action_with_http_info(self, request):
    """Change a single instance to a primary/standby instance.

    Changes a single instance to a primary/standby instance.

    :param StartInstanceSingleToHaActionRequest request
    :return: StartInstanceSingleToHaActionResponse
    """
    # Copy only the attributes that are actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}
    if 'instance_id' in local_var_params:
        path_params['instance_id'] = local_var_params['instance_id']

    query_params = []

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']

    form_params = {}

    body_params = None
    if 'body' in local_var_params:
        body_params = local_var_params['body']
    # A streaming request replaces the JSON body with its file stream.
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v3/{project_id}/instances/{instance_id}/action',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='StartInstanceSingleToHaActionResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def start_recycle_policy(self, request):
    """Set the recycle bin policy.

    Sets the recycle bin policy.

    :param StartRecyclePolicyRequest request
    :return: StartRecyclePolicyResponse
    """
    return self.start_recycle_policy_with_http_info(request)
def start_recycle_policy_with_http_info(self, request):
    """Set the recycle bin policy.

    Sets the recycle bin policy.

    :param StartRecyclePolicyRequest request
    :return: StartRecyclePolicyResponse
    """
    # Copy only the attributes that are actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']

    form_params = {}

    body_params = None
    if 'body' in local_var_params:
        body_params = local_var_params['body']
    # A streaming request replaces the JSON body with its file stream.
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v3/{project_id}/instances/recycle-policy',
        method='PUT',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='StartRecyclePolicyResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def start_resize_flavor_action(self, request):
    """Change the specifications of a DB instance.

    Changes the specifications of a DB instance.

    :param StartResizeFlavorActionRequest request
    :return: StartResizeFlavorActionResponse
    """
    return self.start_resize_flavor_action_with_http_info(request)
def start_resize_flavor_action_with_http_info(self, request):
    """Change the specifications of a DB instance.

    Changes the specifications of a DB instance.

    :param StartResizeFlavorActionRequest request
    :return: StartResizeFlavorActionResponse
    """
    # Copy only the attributes that are actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}
    if 'instance_id' in local_var_params:
        path_params['instance_id'] = local_var_params['instance_id']

    query_params = []

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']

    form_params = {}

    body_params = None
    if 'body' in local_var_params:
        body_params = local_var_params['body']
    # A streaming request replaces the JSON body with its file stream.
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v3/{project_id}/instances/{instance_id}/action',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='StartResizeFlavorActionResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def switch_ssl(self, request):
    """Configure SSL data encryption.

    Configures SSL data encryption.

    :param SwitchSslRequest request
    :return: SwitchSslResponse
    """
    return self.switch_ssl_with_http_info(request)
def switch_ssl_with_http_info(self, request):
    """Configure SSL data encryption.

    Configures SSL data encryption.

    :param SwitchSslRequest request
    :return: SwitchSslResponse
    """
    # Copy only the attributes that are actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}
    if 'instance_id' in local_var_params:
        path_params['instance_id'] = local_var_params['instance_id']

    query_params = []

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']

    form_params = {}

    body_params = None
    if 'body' in local_var_params:
        body_params = local_var_params['body']
    # A streaming request replaces the JSON body with its file stream.
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v3/{project_id}/instances/{instance_id}/ssl',
        method='PUT',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='SwitchSslResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def update_configuration(self, request):
"""修改参数模板参数
修改参数模板参数。
:param UpdateConfigurationRequest request
:return: UpdateConfigurationResponse
"""
return self.update_configuration_with_http_info(request)
def update_configuration_with_http_info(self, request):
"""修改参数模板参数
修改参数模板参数。
:param UpdateConfigurationRequest request
:return: UpdateConfigurationResponse
"""
all_params = ['config_id', 'update_configuration_request_body', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'config_id' in local_var_params:
path_params['config_id'] = local_var_params['config_id']
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/configurations/{config_id}',
method='PUT',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='UpdateConfigurationResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def update_data_ip(self, request):
"""修改内网地址
修改内网地址
:param UpdateDataIpRequest request
:return: UpdateDataIpResponse
"""
return self.update_data_ip_with_http_info(request)
def update_data_ip_with_http_info(self, request):
"""修改内网地址
修改内网地址
:param UpdateDataIpRequest request
:return: UpdateDataIpResponse
"""
all_params = ['instance_id', 'data_ip_request_body', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/ip',
method='PUT',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='UpdateDataIpResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def update_dns_name(self, request):
"""修改域名
修改域名
:param UpdateDnsNameRequest request
:return: UpdateDnsNameResponse
"""
return self.update_dns_name_with_http_info(request)
def update_dns_name_with_http_info(self, request):
"""修改域名
修改域名
:param UpdateDnsNameRequest request
:return: UpdateDnsNameResponse
"""
all_params = ['instance_id', 'modify_dns_name_request_body', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/modify-dns',
method='PUT',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='UpdateDnsNameResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def update_instance_configuration(self, request):
"""修改指定实例的参数
修改指定实例的参数。
:param UpdateInstanceConfigurationRequest request
:return: UpdateInstanceConfigurationResponse
"""
return self.update_instance_configuration_with_http_info(request)
def update_instance_configuration_with_http_info(self, request):
"""修改指定实例的参数
修改指定实例的参数。
:param UpdateInstanceConfigurationRequest request
:return: UpdateInstanceConfigurationResponse
"""
all_params = ['instance_id', 'update_instance_configuration_request_body', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/configurations',
method='PUT',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='UpdateInstanceConfigurationResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def update_instance_name(self, request):
"""修改实例名称
修改实例名称。
:param UpdateInstanceNameRequest request
:return: UpdateInstanceNameResponse
"""
return self.update_instance_name_with_http_info(request)
def update_instance_name_with_http_info(self, request):
"""修改实例名称
修改实例名称。
:param UpdateInstanceNameRequest request
:return: UpdateInstanceNameResponse
"""
all_params = ['instance_id', 'modify_instance_name_request_body', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/name',
method='PUT',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='UpdateInstanceNameResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def update_port(self, request):
"""修改数据库端口
修改数据库端口
:param UpdatePortRequest request
:return: UpdatePortResponse
"""
return self.update_port_with_http_info(request)
def update_port_with_http_info(self, request):
"""修改数据库端口
修改数据库端口
:param UpdatePortRequest request
:return: UpdatePortResponse
"""
all_params = ['instance_id', 'update_db_port_request_body', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/port',
method='PUT',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='UpdatePortResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def update_postgresql_instance_alias(self, request):
"""修改实例备注信息
修改指定数据库实例的备注信息。
:param UpdatePostgresqlInstanceAliasRequest request
:return: UpdatePostgresqlInstanceAliasResponse
"""
return self.update_postgresql_instance_alias_with_http_info(request)
def update_postgresql_instance_alias_with_http_info(self, request):
"""修改实例备注信息
修改指定数据库实例的备注信息。
:param UpdatePostgresqlInstanceAliasRequest request
:return: UpdatePostgresqlInstanceAliasResponse
"""
all_params = ['instance_id', 'update_rds_instance_alias_request', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/alias',
method='PUT',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='UpdatePostgresqlInstanceAliasResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def allow_db_user_privilege(self, request):
"""授权数据库帐号
授权数据库帐号。
:param AllowDbUserPrivilegeRequest request
:return: AllowDbUserPrivilegeResponse
"""
return self.allow_db_user_privilege_with_http_info(request)
def allow_db_user_privilege_with_http_info(self, request):
"""授权数据库帐号
授权数据库帐号。
:param AllowDbUserPrivilegeRequest request
:return: AllowDbUserPrivilegeResponse
"""
all_params = ['instance_id', 'grant_request', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/db_privilege',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='AllowDbUserPrivilegeResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def create_database(self, request):
"""创建数据库
创建数据库。
:param CreateDatabaseRequest request
:return: CreateDatabaseResponse
"""
return self.create_database_with_http_info(request)
def create_database_with_http_info(self, request):
"""创建数据库
创建数据库。
:param CreateDatabaseRequest request
:return: CreateDatabaseResponse
"""
all_params = ['instance_id', 'create_database_request', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/database',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='CreateDatabaseResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def create_db_user(self, request):
"""创建数据库用户
创建数据库用户。
:param CreateDbUserRequest request
:return: CreateDbUserResponse
"""
return self.create_db_user_with_http_info(request)
def create_db_user_with_http_info(self, request):
"""创建数据库用户
创建数据库用户。
:param CreateDbUserRequest request
:return: CreateDbUserResponse
"""
all_params = ['instance_id', 'create_db_user_request', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/db_user',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='CreateDbUserResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def delete_database(self, request):
"""删除数据库
删除数据库。
:param DeleteDatabaseRequest request
:return: DeleteDatabaseResponse
"""
return self.delete_database_with_http_info(request)
def delete_database_with_http_info(self, request):
"""删除数据库
删除数据库。
:param DeleteDatabaseRequest request
:return: DeleteDatabaseResponse
"""
all_params = ['instance_id', 'db_name', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
if 'db_name' in local_var_params:
path_params['db_name'] = local_var_params['db_name']
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/database/{db_name}',
method='DELETE',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='DeleteDatabaseResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def delete_db_user(self, request):
"""删除数据库用户
删除数据库用户。
:param DeleteDbUserRequest request
:return: DeleteDbUserResponse
"""
return self.delete_db_user_with_http_info(request)
def delete_db_user_with_http_info(self, request):
"""删除数据库用户
删除数据库用户。
:param DeleteDbUserRequest request
:return: DeleteDbUserResponse
"""
all_params = ['instance_id', 'user_name', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
if 'user_name' in local_var_params:
path_params['user_name'] = local_var_params['user_name']
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/db_user/{user_name}',
method='DELETE',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='DeleteDbUserResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def list_authorized_databases(self, request):
"""查询指定用户的已授权数据库
查询指定用户的已授权数据库。
:param ListAuthorizedDatabasesRequest request
:return: ListAuthorizedDatabasesResponse
"""
return self.list_authorized_databases_with_http_info(request)
def list_authorized_databases_with_http_info(self, request):
"""查询指定用户的已授权数据库
查询指定用户的已授权数据库。
:param ListAuthorizedDatabasesRequest request
:return: ListAuthorizedDatabasesResponse
"""
all_params = ['instance_id', 'user_name', 'page', 'limit', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
if 'user_name' in local_var_params:
query_params.append(('user-name', local_var_params['user_name']))
if 'page' in local_var_params:
query_params.append(('page', local_var_params['page']))
if 'limit' in local_var_params:
query_params.append(('limit', local_var_params['limit']))
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/db_user/database',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ListAuthorizedDatabasesResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def list_authorized_db_users(self, request):
"""查询指定数据库的已授权用户
查询指定数据库的已授权用户。
:param ListAuthorizedDbUsersRequest request
:return: ListAuthorizedDbUsersResponse
"""
return self.list_authorized_db_users_with_http_info(request)
def list_authorized_db_users_with_http_info(self, request):
"""查询指定数据库的已授权用户
查询指定数据库的已授权用户。
:param ListAuthorizedDbUsersRequest request
:return: ListAuthorizedDbUsersResponse
"""
all_params = ['instance_id', 'db_name', 'page', 'limit', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
if 'db_name' in local_var_params:
query_params.append(('db-name', local_var_params['db_name']))
if 'page' in local_var_params:
query_params.append(('page', local_var_params['page']))
if 'limit' in local_var_params:
query_params.append(('limit', local_var_params['limit']))
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/database/db_user',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ListAuthorizedDbUsersResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def list_databases(self, request):
"""查询数据库列表
查询数据库列表。
:param ListDatabasesRequest request
:return: ListDatabasesResponse
"""
return self.list_databases_with_http_info(request)
def list_databases_with_http_info(self, request):
"""查询数据库列表
查询数据库列表。
:param ListDatabasesRequest request
:return: ListDatabasesResponse
"""
all_params = ['instance_id', 'page', 'limit', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
if 'page' in local_var_params:
query_params.append(('page', local_var_params['page']))
if 'limit' in local_var_params:
query_params.append(('limit', local_var_params['limit']))
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/database/detail',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ListDatabasesResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def list_db_users(self, request):
"""查询数据库用户列表
查询数据库用户列表。
:param ListDbUsersRequest request
:return: ListDbUsersResponse
"""
return self.list_db_users_with_http_info(request)
def list_db_users_with_http_info(self, request):
"""查询数据库用户列表
查询数据库用户列表。
:param ListDbUsersRequest request
:return: ListDbUsersResponse
"""
all_params = ['instance_id', 'page', 'limit', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
if 'page' in local_var_params:
query_params.append(('page', local_var_params['page']))
if 'limit' in local_var_params:
query_params.append(('limit', local_var_params['limit']))
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/db_user/detail',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ListDbUsersResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def reset_pwd(self, request):
"""重置数据库密码
重置数据库密码.
:param ResetPwdRequest request
:return: ResetPwdResponse
"""
return self.reset_pwd_with_http_info(request)
def reset_pwd_with_http_info(self, request):
"""重置数据库密码
重置数据库密码.
:param ResetPwdRequest request
:return: ResetPwdResponse
"""
all_params = ['instance_id', 'pwd_reset_request_body', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/password',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ResetPwdResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def revoke(self, request):
"""解除数据库帐号权限
解除数据库帐号权限。
:param RevokeRequest request
:return: RevokeResponse
"""
return self.revoke_with_http_info(request)
def revoke_with_http_info(self, request):
"""解除数据库帐号权限
解除数据库帐号权限。
:param RevokeRequest request
:return: RevokeResponse
"""
all_params = ['instance_id', 'revoke_request_body', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/db_privilege',
method='DELETE',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='RevokeResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def set_db_user_pwd(self, request):
"""设置数据库账号密码
设置数据库账号密码
:param SetDbUserPwdRequest request
:return: SetDbUserPwdResponse
"""
return self.set_db_user_pwd_with_http_info(request)
def set_db_user_pwd_with_http_info(self, request):
"""设置数据库账号密码
设置数据库账号密码
:param SetDbUserPwdRequest request
:return: SetDbUserPwdResponse
"""
all_params = ['instance_id', 'db_user_pwd_request_body', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/db_user/resetpwd',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='SetDbUserPwdResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def update_database(self, request):
"""修改指定实例的数据库备注
修改指定实例中的数据库备注。
:param UpdateDatabaseRequest request
:return: UpdateDatabaseResponse
"""
return self.update_database_with_http_info(request)
def update_database_with_http_info(self, request):
"""修改指定实例的数据库备注
修改指定实例中的数据库备注。
:param UpdateDatabaseRequest request
:return: UpdateDatabaseResponse
"""
all_params = ['instance_id', 'update_database_req', 'x_language']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
if 'x_language' in local_var_params:
header_params['X-Language'] = local_var_params['x_language']
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/database/update',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='UpdateDatabaseResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def allow_db_privilege(self, request):
"""授权数据库帐号
在指定实例的数据库中, 设置帐号的权限。
:param AllowDbPrivilegeRequest request
:return: AllowDbPrivilegeResponse
"""
return self.allow_db_privilege_with_http_info(request)
def allow_db_privilege_with_http_info(self, request):
    """Grant privileges to a database account in the specified instance's database.

    :param AllowDbPrivilegeRequest request
    :return: AllowDbPrivilegeResponse
    """
    # The generated ``all_params`` list was assigned but never read; removed.
    # Copy only the attributes that were actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}
    if 'instance_id' in local_var_params:
        path_params['instance_id'] = local_var_params['instance_id']

    query_params = []

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']

    form_params = {}

    body_params = None
    if 'body' in local_var_params:
        body_params = local_var_params['body']
    if isinstance(request, SdkStreamRequest):
        # Streaming requests send their payload as a file stream instead.
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v3/{project_id}/instances/{instance_id}/db_privilege',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='AllowDbPrivilegeResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def create_postgresql_database(self, request):
    """Create a database in the specified instance.

    Convenience wrapper that delegates to
    ``create_postgresql_database_with_http_info``.

    :param CreatePostgresqlDatabaseRequest request
    :return: CreatePostgresqlDatabaseResponse
    """
    return self.create_postgresql_database_with_http_info(request)
def create_postgresql_database_with_http_info(self, request):
    """Create a database in the specified instance.

    :param CreatePostgresqlDatabaseRequest request
    :return: CreatePostgresqlDatabaseResponse
    """
    # The generated ``all_params`` list was assigned but never read; removed.
    # Copy only the attributes that were actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}
    if 'instance_id' in local_var_params:
        path_params['instance_id'] = local_var_params['instance_id']

    query_params = []

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']

    form_params = {}

    body_params = None
    if 'body' in local_var_params:
        body_params = local_var_params['body']
    if isinstance(request, SdkStreamRequest):
        # Streaming requests send their payload as a file stream instead.
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v3/{project_id}/instances/{instance_id}/database',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='CreatePostgresqlDatabaseResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def create_postgresql_database_schema(self, request):
    """Create a database schema in the specified instance's database.

    Convenience wrapper that delegates to
    ``create_postgresql_database_schema_with_http_info``.

    :param CreatePostgresqlDatabaseSchemaRequest request
    :return: CreatePostgresqlDatabaseSchemaResponse
    """
    return self.create_postgresql_database_schema_with_http_info(request)
def create_postgresql_database_schema_with_http_info(self, request):
    """Create a database schema in the specified instance's database.

    :param CreatePostgresqlDatabaseSchemaRequest request
    :return: CreatePostgresqlDatabaseSchemaResponse
    """
    # The generated ``all_params`` list was assigned but never read; removed.
    # Copy only the attributes that were actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}
    if 'instance_id' in local_var_params:
        path_params['instance_id'] = local_var_params['instance_id']

    query_params = []

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']

    form_params = {}

    body_params = None
    if 'body' in local_var_params:
        body_params = local_var_params['body']
    if isinstance(request, SdkStreamRequest):
        # Streaming requests send their payload as a file stream instead.
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v3/{project_id}/instances/{instance_id}/schema',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='CreatePostgresqlDatabaseSchemaResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def create_postgresql_db_user(self, request):
    """Create a database user in the specified instance.

    Convenience wrapper that delegates to
    ``create_postgresql_db_user_with_http_info``.

    :param CreatePostgresqlDbUserRequest request
    :return: CreatePostgresqlDbUserResponse
    """
    return self.create_postgresql_db_user_with_http_info(request)
def create_postgresql_db_user_with_http_info(self, request):
    """Create a database user in the specified instance.

    :param CreatePostgresqlDbUserRequest request
    :return: CreatePostgresqlDbUserResponse
    """
    # The generated ``all_params`` list was assigned but never read; removed.
    # Copy only the attributes that were actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}
    if 'instance_id' in local_var_params:
        path_params['instance_id'] = local_var_params['instance_id']

    query_params = []

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']

    form_params = {}

    body_params = None
    if 'body' in local_var_params:
        body_params = local_var_params['body']
    if isinstance(request, SdkStreamRequest):
        # Streaming requests send their payload as a file stream instead.
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v3/{project_id}/instances/{instance_id}/db_user',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='CreatePostgresqlDbUserResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_postgresql_database_schemas(self, request):
    """Query the database schema list of the specified instance.

    Convenience wrapper that delegates to
    ``list_postgresql_database_schemas_with_http_info``.

    :param ListPostgresqlDatabaseSchemasRequest request
    :return: ListPostgresqlDatabaseSchemasResponse
    """
    return self.list_postgresql_database_schemas_with_http_info(request)
def list_postgresql_database_schemas_with_http_info(self, request):
    """Query the database schema list of the specified instance.

    :param ListPostgresqlDatabaseSchemasRequest request
    :return: ListPostgresqlDatabaseSchemasResponse
    """
    # The generated ``all_params`` list was assigned but never read; removed.
    # Copy only the attributes that were actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}
    if 'instance_id' in local_var_params:
        path_params['instance_id'] = local_var_params['instance_id']

    query_params = []
    if 'db_name' in local_var_params:
        query_params.append(('db_name', local_var_params['db_name']))
    if 'page' in local_var_params:
        query_params.append(('page', local_var_params['page']))
    if 'limit' in local_var_params:
        query_params.append(('limit', local_var_params['limit']))

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']

    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        # Streaming requests send their payload as a file stream.
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v3/{project_id}/instances/{instance_id}/schema/detail',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ListPostgresqlDatabaseSchemasResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_postgresql_databases(self, request):
    """Query the database list of the specified instance.

    Convenience wrapper that delegates to
    ``list_postgresql_databases_with_http_info``.

    :param ListPostgresqlDatabasesRequest request
    :return: ListPostgresqlDatabasesResponse
    """
    return self.list_postgresql_databases_with_http_info(request)
def list_postgresql_databases_with_http_info(self, request):
    """Query the database list of the specified instance.

    :param ListPostgresqlDatabasesRequest request
    :return: ListPostgresqlDatabasesResponse
    """
    # The generated ``all_params`` list was assigned but never read; removed.
    # Copy only the attributes that were actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}
    if 'instance_id' in local_var_params:
        path_params['instance_id'] = local_var_params['instance_id']

    query_params = []
    if 'page' in local_var_params:
        query_params.append(('page', local_var_params['page']))
    if 'limit' in local_var_params:
        query_params.append(('limit', local_var_params['limit']))

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']

    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        # Streaming requests send their payload as a file stream.
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v3/{project_id}/instances/{instance_id}/database/detail',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ListPostgresqlDatabasesResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_postgresql_db_user_paginated(self, request):
    """Query the database user list of the specified instance.

    Convenience wrapper that delegates to
    ``list_postgresql_db_user_paginated_with_http_info``.

    :param ListPostgresqlDbUserPaginatedRequest request
    :return: ListPostgresqlDbUserPaginatedResponse
    """
    return self.list_postgresql_db_user_paginated_with_http_info(request)
def list_postgresql_db_user_paginated_with_http_info(self, request):
    """Query the database user list of the specified instance.

    :param ListPostgresqlDbUserPaginatedRequest request
    :return: ListPostgresqlDbUserPaginatedResponse
    """
    # The generated ``all_params`` list was assigned but never read; removed.
    # Copy only the attributes that were actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}
    if 'instance_id' in local_var_params:
        path_params['instance_id'] = local_var_params['instance_id']

    query_params = []
    if 'page' in local_var_params:
        query_params.append(('page', local_var_params['page']))
    if 'limit' in local_var_params:
        query_params.append(('limit', local_var_params['limit']))

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']

    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        # Streaming requests send their payload as a file stream.
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v3/{project_id}/instances/{instance_id}/db_user/detail',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ListPostgresqlDbUserPaginatedResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def set_postgresql_db_user_pwd(self, request):
    """Reset the password of the specified database account.

    Convenience wrapper that delegates to
    ``set_postgresql_db_user_pwd_with_http_info``.

    :param SetPostgresqlDbUserPwdRequest request
    :return: SetPostgresqlDbUserPwdResponse
    """
    return self.set_postgresql_db_user_pwd_with_http_info(request)
def set_postgresql_db_user_pwd_with_http_info(self, request):
    """Reset the password of the specified database account.

    :param SetPostgresqlDbUserPwdRequest request
    :return: SetPostgresqlDbUserPwdResponse
    """
    # The generated ``all_params`` list was assigned but never read; removed.
    # Copy only the attributes that were actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}
    if 'instance_id' in local_var_params:
        path_params['instance_id'] = local_var_params['instance_id']

    query_params = []

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']

    form_params = {}

    body_params = None
    if 'body' in local_var_params:
        body_params = local_var_params['body']
    if isinstance(request, SdkStreamRequest):
        # Streaming requests send their payload as a file stream instead.
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v3/{project_id}/instances/{instance_id}/db_user/resetpwd',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='SetPostgresqlDbUserPwdResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def allow_sqlserver_db_user_privilege(self, request):
    """Grant privileges to a database account.

    Convenience wrapper that delegates to
    ``allow_sqlserver_db_user_privilege_with_http_info``.

    :param AllowSqlserverDbUserPrivilegeRequest request
    :return: AllowSqlserverDbUserPrivilegeResponse
    """
    return self.allow_sqlserver_db_user_privilege_with_http_info(request)
def allow_sqlserver_db_user_privilege_with_http_info(self, request):
    """Grant privileges to a database account.

    :param AllowSqlserverDbUserPrivilegeRequest request
    :return: AllowSqlserverDbUserPrivilegeResponse
    """
    # The generated ``all_params`` list was assigned but never read; removed.
    # Copy only the attributes that were actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}
    if 'instance_id' in local_var_params:
        path_params['instance_id'] = local_var_params['instance_id']

    query_params = []

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']

    form_params = {}

    body_params = None
    if 'body' in local_var_params:
        body_params = local_var_params['body']
    if isinstance(request, SdkStreamRequest):
        # Streaming requests send their payload as a file stream instead.
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v3/{project_id}/instances/{instance_id}/db_privilege',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='AllowSqlserverDbUserPrivilegeResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def create_sqlserver_database(self, request):
    """Create a database.

    Convenience wrapper that delegates to
    ``create_sqlserver_database_with_http_info``.

    :param CreateSqlserverDatabaseRequest request
    :return: CreateSqlserverDatabaseResponse
    """
    return self.create_sqlserver_database_with_http_info(request)
def create_sqlserver_database_with_http_info(self, request):
    """Create a database.

    :param CreateSqlserverDatabaseRequest request
    :return: CreateSqlserverDatabaseResponse
    """
    # The generated ``all_params`` list was assigned but never read; removed.
    # Copy only the attributes that were actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}
    if 'instance_id' in local_var_params:
        path_params['instance_id'] = local_var_params['instance_id']

    query_params = []

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']

    form_params = {}

    body_params = None
    if 'body' in local_var_params:
        body_params = local_var_params['body']
    if isinstance(request, SdkStreamRequest):
        # Streaming requests send their payload as a file stream instead.
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v3/{project_id}/instances/{instance_id}/database',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='CreateSqlserverDatabaseResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def create_sqlserver_db_user(self, request):
    """Create a database user.

    Convenience wrapper that delegates to
    ``create_sqlserver_db_user_with_http_info``.

    :param CreateSqlserverDbUserRequest request
    :return: CreateSqlserverDbUserResponse
    """
    return self.create_sqlserver_db_user_with_http_info(request)
def create_sqlserver_db_user_with_http_info(self, request):
    """Create a database user.

    :param CreateSqlserverDbUserRequest request
    :return: CreateSqlserverDbUserResponse
    """
    # The generated ``all_params`` list was assigned but never read; removed.
    # Copy only the attributes that were actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}
    if 'instance_id' in local_var_params:
        path_params['instance_id'] = local_var_params['instance_id']

    query_params = []

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']

    form_params = {}

    body_params = None
    if 'body' in local_var_params:
        body_params = local_var_params['body']
    if isinstance(request, SdkStreamRequest):
        # Streaming requests send their payload as a file stream instead.
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v3/{project_id}/instances/{instance_id}/db_user',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='CreateSqlserverDbUserResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def delete_sqlserver_database(self, request):
    """Delete a database.

    Convenience wrapper that delegates to
    ``delete_sqlserver_database_with_http_info``.

    :param DeleteSqlserverDatabaseRequest request
    :return: DeleteSqlserverDatabaseResponse
    """
    return self.delete_sqlserver_database_with_http_info(request)
def delete_sqlserver_database_with_http_info(self, request):
    """Delete a database.

    :param DeleteSqlserverDatabaseRequest request
    :return: DeleteSqlserverDatabaseResponse
    """
    # The generated ``all_params`` list was assigned but never read; removed.
    # Copy only the attributes that were actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}
    if 'instance_id' in local_var_params:
        path_params['instance_id'] = local_var_params['instance_id']
    if 'db_name' in local_var_params:
        path_params['db_name'] = local_var_params['db_name']

    query_params = []

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']

    form_params = {}

    body_params = None
    if 'body' in local_var_params:
        body_params = local_var_params['body']
    if isinstance(request, SdkStreamRequest):
        # Streaming requests send their payload as a file stream instead.
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v3/{project_id}/instances/{instance_id}/database/{db_name}',
        method='DELETE',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='DeleteSqlserverDatabaseResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def delete_sqlserver_db_user(self, request):
    """Delete a database user.

    Convenience wrapper that delegates to
    ``delete_sqlserver_db_user_with_http_info``.

    :param DeleteSqlserverDbUserRequest request
    :return: DeleteSqlserverDbUserResponse
    """
    return self.delete_sqlserver_db_user_with_http_info(request)
def delete_sqlserver_db_user_with_http_info(self, request):
    """Delete a database user.

    :param DeleteSqlserverDbUserRequest request
    :return: DeleteSqlserverDbUserResponse
    """
    # The generated ``all_params`` list was assigned but never read; removed.
    # Copy only the attributes that were actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}
    if 'instance_id' in local_var_params:
        path_params['instance_id'] = local_var_params['instance_id']
    if 'user_name' in local_var_params:
        path_params['user_name'] = local_var_params['user_name']

    query_params = []

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']

    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        # Streaming requests send their payload as a file stream.
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v3/{project_id}/instances/{instance_id}/db_user/{user_name}',
        method='DELETE',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='DeleteSqlserverDbUserResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_authorized_sqlserver_db_users(self, request):
    """Query the authorized users of the specified database.

    Convenience wrapper that delegates to
    ``list_authorized_sqlserver_db_users_with_http_info``.

    :param ListAuthorizedSqlserverDbUsersRequest request
    :return: ListAuthorizedSqlserverDbUsersResponse
    """
    return self.list_authorized_sqlserver_db_users_with_http_info(request)
def list_authorized_sqlserver_db_users_with_http_info(self, request):
    """Query the authorized users of the specified database.

    :param ListAuthorizedSqlserverDbUsersRequest request
    :return: ListAuthorizedSqlserverDbUsersResponse
    """
    # The generated ``all_params`` list was assigned but never read; removed.
    # Copy only the attributes that were actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}
    if 'instance_id' in local_var_params:
        path_params['instance_id'] = local_var_params['instance_id']

    query_params = []
    # NOTE: this endpoint expects the hyphenated 'db-name' query key.
    if 'db_name' in local_var_params:
        query_params.append(('db-name', local_var_params['db_name']))
    if 'page' in local_var_params:
        query_params.append(('page', local_var_params['page']))
    if 'limit' in local_var_params:
        query_params.append(('limit', local_var_params['limit']))

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']

    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        # Streaming requests send their payload as a file stream.
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v3/{project_id}/instances/{instance_id}/database/db_user',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ListAuthorizedSqlserverDbUsersResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_sqlserver_databases(self, request):
    """Query the database list.

    Convenience wrapper that delegates to
    ``list_sqlserver_databases_with_http_info``.

    :param ListSqlserverDatabasesRequest request
    :return: ListSqlserverDatabasesResponse
    """
    return self.list_sqlserver_databases_with_http_info(request)
def list_sqlserver_databases_with_http_info(self, request):
    """Query the database list.

    :param ListSqlserverDatabasesRequest request
    :return: ListSqlserverDatabasesResponse
    """
    # The generated ``all_params`` list was assigned but never read; removed.
    # Copy only the attributes that were actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}
    if 'instance_id' in local_var_params:
        path_params['instance_id'] = local_var_params['instance_id']

    query_params = []
    if 'page' in local_var_params:
        query_params.append(('page', local_var_params['page']))
    if 'limit' in local_var_params:
        query_params.append(('limit', local_var_params['limit']))
    # NOTE: this endpoint expects the hyphenated 'db-name' query key.
    if 'db_name' in local_var_params:
        query_params.append(('db-name', local_var_params['db_name']))

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']

    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        # Streaming requests send their payload as a file stream.
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v3/{project_id}/instances/{instance_id}/database/detail',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ListSqlserverDatabasesResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_sqlserver_db_users(self, request):
    """Query the database user list.

    Convenience wrapper that delegates to
    ``list_sqlserver_db_users_with_http_info``.

    :param ListSqlserverDbUsersRequest request
    :return: ListSqlserverDbUsersResponse
    """
    return self.list_sqlserver_db_users_with_http_info(request)
def list_sqlserver_db_users_with_http_info(self, request):
    """Query the database user list.

    :param ListSqlserverDbUsersRequest request
    :return: ListSqlserverDbUsersResponse
    """
    # The generated ``all_params`` list was assigned but never read; removed.
    # Copy only the attributes that were actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}
    if 'instance_id' in local_var_params:
        path_params['instance_id'] = local_var_params['instance_id']

    query_params = []
    if 'page' in local_var_params:
        query_params.append(('page', local_var_params['page']))
    if 'limit' in local_var_params:
        query_params.append(('limit', local_var_params['limit']))

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']

    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        # Streaming requests send their payload as a file stream.
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v3/{project_id}/instances/{instance_id}/db_user/detail',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ListSqlserverDbUsersResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def revoke_sqlserver_db_user_privilege(self, request):
    """Revoke privileges of a database account.

    Convenience wrapper that delegates to
    ``revoke_sqlserver_db_user_privilege_with_http_info``.

    :param RevokeSqlserverDbUserPrivilegeRequest request
    :return: RevokeSqlserverDbUserPrivilegeResponse
    """
    return self.revoke_sqlserver_db_user_privilege_with_http_info(request)
def revoke_sqlserver_db_user_privilege_with_http_info(self, request):
    """Revoke privileges of a database account.

    :param RevokeSqlserverDbUserPrivilegeRequest request
    :return: RevokeSqlserverDbUserPrivilegeResponse
    """
    # The generated ``all_params`` list was assigned but never read; removed.
    # Copy only the attributes that were actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}
    if 'instance_id' in local_var_params:
        path_params['instance_id'] = local_var_params['instance_id']

    query_params = []

    header_params = {}
    if 'x_language' in local_var_params:
        header_params['X-Language'] = local_var_params['x_language']

    form_params = {}

    body_params = None
    if 'body' in local_var_params:
        body_params = local_var_params['body']
    if isinstance(request, SdkStreamRequest):
        # Streaming requests send their payload as a file stream instead.
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v3/{project_id}/instances/{instance_id}/db_privilege',
        method='DELETE',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='RevokeSqlserverDbUserPrivilegeResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def call_api(self, resource_path, method, path_params=None, query_params=None, header_params=None, body=None,
             post_params=None, response_type=None, response_headers=None, auth_settings=None,
             collection_formats=None, request_type=None):
    """Make the HTTP request and return the deserialized data.

    Thin pass-through to :meth:`do_http_request`; the response is returned
    directly.

    :param resource_path: Path to method endpoint.
    :param method: HTTP method to call.
    :param path_params: Path parameters in the url.
    :param query_params: Query parameters in the url.
    :param header_params: Header parameters placed in the request header.
    :param body: Request body.
    :param post_params dict: Request post form parameters, for
        `application/x-www-form-urlencoded` and `multipart/form-data`.
    :param response_type: Response data type.
    :param response_headers: Headers that should be added to response data.
    :param auth_settings list: Auth Settings names for the request.
    :param collection_formats: dict of collection formats for path, query,
        header, and post parameters.
    :param request_type: Request data type.
    :return: the response, returned directly.
    """
    return self.do_http_request(
        method=method,
        resource_path=resource_path,
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body,
        post_params=post_params,
        response_type=response_type,
        response_headers=response_headers,
        collection_formats=collection_formats,
        request_type=request_type)
| 31.170822
| 125
| 0.626474
| 20,350
| 202,548
| 5.815037
| 0.027322
| 0.054624
| 0.095592
| 0.041779
| 0.93843
| 0.926616
| 0.910281
| 0.889357
| 0.874434
| 0.727454
| 0
| 0.000684
| 0.285513
| 202,548
| 6,497
| 126
| 31.17562
| 0.817017
| 0.100174
| 0
| 0.857895
| 0
| 0
| 0.122593
| 0.045811
| 0
| 0
| 0
| 0
| 0
| 1
| 0.050789
| false
| 0.000263
| 0.002632
| 0
| 0.105
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
81138e45320d8978fa823b2a50372432fa5abb64
| 15,195
|
py
|
Python
|
backend/tests/baserow/contrib/database/api/export/test_export_views.py
|
cjh0613/baserow
|
62871f5bf53c9d25446976031aacb706c0abe584
|
[
"MIT"
] | null | null | null |
backend/tests/baserow/contrib/database/api/export/test_export_views.py
|
cjh0613/baserow
|
62871f5bf53c9d25446976031aacb706c0abe584
|
[
"MIT"
] | 1
|
2021-09-13T06:04:12.000Z
|
2021-09-13T06:04:12.000Z
|
backend/tests/baserow/contrib/database/api/export/test_export_views.py
|
cjh0613/baserow
|
62871f5bf53c9d25446976031aacb706c0abe584
|
[
"MIT"
] | 2
|
2021-09-20T15:38:30.000Z
|
2021-09-28T09:45:22.000Z
|
from unittest.mock import patch
import pytest
from django.core.files.storage import FileSystemStorage
from django.urls import reverse
from django.utils.dateparse import parse_datetime
from django.utils.timezone import utc, make_aware
from django_capture_on_commit_callbacks import capture_on_commit_callbacks
from freezegun import freeze_time
from rest_framework.fields import DateTimeField
from rest_framework.status import HTTP_400_BAD_REQUEST, HTTP_404_NOT_FOUND, HTTP_200_OK
from baserow.contrib.database.rows.handler import RowHandler
@pytest.mark.django_db
def test_unknown_export_type_for_table_returns_error(data_fixture, api_client, tmpdir):
    """An unsupported exporter_type must be rejected with a body-validation error."""
    owner, jwt_token = data_fixture.create_user_and_token()
    exported_table = data_fixture.create_database_table(user=owner)
    data_fixture.create_text_field(table=exported_table, name="text_field")

    url = reverse(
        "api:database:export:export_table",
        kwargs={"table_id": exported_table.id},
    )
    payload = {
        "exporter_type": "unknown",
        "export_charset": "utf-8",
        "csv_include_header": "True",
        "csv_column_separator": ",",
    }
    resp = api_client.post(
        url,
        data=payload,
        format="json",
        HTTP_AUTHORIZATION=f"JWT {jwt_token}",
    )

    assert resp.status_code == HTTP_400_BAD_REQUEST
    assert resp.json()["error"] == "ERROR_REQUEST_BODY_VALIDATION"
@pytest.mark.django_db
def test_exporting_table_without_permissions_returns_error(
    data_fixture, api_client, tmpdir
):
    """A user outside the table's group cannot start an export."""
    user = data_fixture.create_user()
    unpermissioned_user, unpermissioned_token = data_fixture.create_user_and_token()
    table = data_fixture.create_database_table(user=user)
    data_fixture.create_text_field(table=table, name="text_field")
    export_url = reverse(
        "api:database:export:export_table", kwargs={"table_id": table.id}
    )
    csv_request = {
        "exporter_type": "csv",
        "export_charset": "utf-8",
        "csv_include_header": "True",
        "csv_column_separator": ",",
    }
    # Authenticate as the user who does not belong to the table's group.
    response = api_client.post(
        export_url,
        data=csv_request,
        format="json",
        HTTP_AUTHORIZATION=f"JWT {unpermissioned_token}",
    )
    assert response.status_code == HTTP_400_BAD_REQUEST
    assert response.json()["error"] == "ERROR_USER_NOT_IN_GROUP"
@pytest.mark.django_db
def test_exporting_missing_view_returns_error(data_fixture, api_client, tmpdir):
    """Exporting a view id that does not exist yields a 404."""
    user, token = data_fixture.create_user_and_token()
    table = data_fixture.create_database_table(user=user)
    export_url = reverse(
        "api:database:export:export_table", kwargs={"table_id": table.id}
    )
    csv_request = {
        "view_id": 9999,  # no view with this id exists
        "exporter_type": "csv",
        "export_charset": "utf-8",
        "csv_include_header": "True",
        "csv_column_separator": ",",
    }
    response = api_client.post(
        export_url,
        data=csv_request,
        format="json",
        HTTP_AUTHORIZATION=f"JWT {token}",
    )
    assert response.status_code == HTTP_404_NOT_FOUND
    assert response.json()["error"] == "ERROR_VIEW_DOES_NOT_EXIST"
@pytest.mark.django_db
def test_exporting_view_which_isnt_for_table_returns_error(
    data_fixture, api_client, tmpdir
):
    """A view belonging to a different table is rejected."""
    user, token = data_fixture.create_user_and_token()
    table = data_fixture.create_database_table(user=user)
    # This grid view hangs off an unrelated, freshly created table.
    grid_view_for_other_table = data_fixture.create_grid_view()
    export_url = reverse(
        "api:database:export:export_table", kwargs={"table_id": table.id}
    )
    csv_request = {
        "view_id": grid_view_for_other_table.id,
        "exporter_type": "csv",
        "export_charset": "utf-8",
        "csv_include_header": "True",
        "csv_column_separator": ",",
    }
    response = api_client.post(
        export_url,
        data=csv_request,
        format="json",
        HTTP_AUTHORIZATION=f"JWT {token}",
    )
    assert response.status_code == HTTP_400_BAD_REQUEST
    assert response.json()["error"] == "ERROR_VIEW_NOT_IN_TABLE"
@pytest.mark.django_db
def test_exporting_missing_table_returns_error(data_fixture, api_client, tmpdir):
    """Exporting a table id that does not exist yields a 404."""
    user, token = data_fixture.create_user_and_token()
    export_url = reverse(
        "api:database:export:export_table", kwargs={"table_id": 9999}
    )
    csv_request = {
        "exporter_type": "csv",
        "export_charset": "utf-8",
        "csv_include_header": "True",
        "csv_column_separator": ",",
    }
    response = api_client.post(
        export_url,
        data=csv_request,
        format="json",
        HTTP_AUTHORIZATION=f"JWT {token}",
    )
    assert response.status_code == HTTP_404_NOT_FOUND
    assert response.json()["error"] == "ERROR_TABLE_DOES_NOT_EXIST"
@pytest.mark.django_db
def test_getting_missing_export_job_returns_error(data_fixture, api_client, tmpdir):
    """Polling a job id that was never created yields a 404."""
    user, token = data_fixture.create_user_and_token()
    job_url = reverse("api:database:export:get", kwargs={"job_id": 9999})
    response = api_client.get(
        job_url,
        format="json",
        HTTP_AUTHORIZATION=f"JWT {token}",
    )
    assert response.status_code == HTTP_404_NOT_FOUND
    assert response.json()["error"] == "ERROR_EXPORT_JOB_DOES_NOT_EXIST"
@pytest.mark.django_db
def test_getting_other_users_export_job_returns_error(data_fixture, api_client, tmpdir):
    """A user must not be able to read another user's export job.

    Starts a real export as one user, then polls that job as a second user.
    The API hides the job's existence behind a 404 rather than a permission
    error.
    """
    user, token = data_fixture.create_user_and_token()
    unpermissioned_user, unpermissioned_token = data_fixture.create_user_and_token()
    table = data_fixture.create_database_table(user=user)
    data_fixture.create_text_field(table=table, name="text_field")
    response = api_client.post(
        reverse(
            "api:database:export:export_table",
            kwargs={"table_id": table.id},
        ),
        data={
            "exporter_type": "csv",
            "export_charset": "utf-8",
            "csv_include_header": "True",
            "csv_column_separator": ",",
        },
        format="json",
        HTTP_AUTHORIZATION=f"JWT {token}",
    )
    assert response.status_code == HTTP_200_OK
    # BUGFIX: poll the job that was actually created instead of a hard-coded
    # id of 9999 — with 9999 the 404 was returned because no such job existed,
    # so the cross-user permission check was never exercised.
    job_id = response.json()["id"]
    response = api_client.get(
        reverse("api:database:export:get", kwargs={"job_id": job_id}),
        format="json",
        HTTP_AUTHORIZATION=f"JWT {unpermissioned_token}",
    )
    assert response.status_code == HTTP_404_NOT_FOUND
    assert response.json()["error"] == "ERROR_EXPORT_JOB_DOES_NOT_EXIST"
@pytest.mark.django_db
def test_exporting_csv_writes_file_to_storage(
    data_fixture, api_client, tmpdir, settings
):
    """End-to-end CSV export of a filtered and sorted grid view.

    Builds a table with text, single-select and date fields plus a grid view
    that filters on ``contains "test"`` and sorts ascending on the text field,
    then starts an export over the API and verifies that:

    * the initial response reports a ``pending`` job with no file/url yet,
    * polling the job afterwards reports ``complete`` with a download url
      pointing at the exported file,
    * the file written to storage contains the BOM-prefixed CSV with the
      view's filter and sort applied.
    """
    user, token = data_fixture.create_user_and_token()
    table = data_fixture.create_database_table(user=user)
    text_field = data_fixture.create_text_field(table=table, name="text_field", order=0)
    option_field = data_fixture.create_single_select_field(
        table=table, name="option_field", order=1
    )
    option_a = data_fixture.create_select_option(
        field=option_field, value="A", color="blue"
    )
    option_b = data_fixture.create_select_option(
        field=option_field, value="B", color="red"
    )
    date_field = data_fixture.create_date_field(
        table=table,
        date_include_time=True,
        date_format="US",
        name="date_field",
        order=2,
    )
    grid_view = data_fixture.create_grid_view(table=table)
    data_fixture.create_view_filter(
        view=grid_view, field=text_field, type="contains", value="test"
    )
    data_fixture.create_view_sort(view=grid_view, field=text_field, order="ASC")
    row_handler = RowHandler()
    row_handler.create_row(
        user=user,
        table=table,
        values={
            text_field.id: "test",
            date_field.id: "2020-02-01 01:23",
            option_field.id: option_b.id,
        },
    )
    row_handler.create_row(
        user=user,
        table=table,
        values={
            text_field.id: "atest",
            date_field.id: "2020-02-01 01:23",
            option_field.id: option_a.id,
        },
    )
    # Redirect the export handler's storage into the pytest tmpdir.
    storage = FileSystemStorage(location=(str(tmpdir)), base_url="http://localhost")
    with patch("baserow.contrib.database.export.handler.default_storage", new=storage):
        run_time = make_aware(parse_datetime("2020-02-01 01:00"), timezone=utc)
        # DRF uses some custom internal date time formatting, use the field itself
        # so the test doesn't break if we set a different default timezone format etc
        expected_created_at = DateTimeField().to_representation(run_time)
        with freeze_time(run_time):
            # The export task is queued on transaction commit; capture and run it.
            with capture_on_commit_callbacks(execute=True):
                response = api_client.post(
                    reverse(
                        "api:database:export:export_table",
                        kwargs={"table_id": table.id},
                    ),
                    data={
                        "view_id": grid_view.id,
                        "exporter_type": "csv",
                        "export_charset": "utf-8",
                        "csv_include_header": "True",
                        "csv_column_separator": ",",
                    },
                    format="json",
                    HTTP_AUTHORIZATION=f"JWT {token}",
                )
            response_json = response.json()
            job_id = response_json["id"]
            assert response_json == {
                "id": job_id,
                "created_at": expected_created_at,
                "exported_file_name": None,
                "exporter_type": "csv",
                "progress_percentage": 0.0,
                "status": "pending",
                "table": table.id,
                "view": grid_view.id,
                "url": None,
            }
            response = api_client.get(
                reverse("api:database:export:get", kwargs={"job_id": job_id}),
                format="json",
                HTTP_AUTHORIZATION=f"JWT {token}",
            )
            json = response.json()
            filename = json["exported_file_name"]
            assert json == {
                "id": job_id,
                "created_at": expected_created_at,
                "exported_file_name": filename,
                "exporter_type": "csv",
                "progress_percentage": 1.0,
                "status": "complete",
                "table": table.id,
                "view": grid_view.id,
                # BUGFIX: interpolate the actual exported file name — the
                # previous f-string had no placeholder and asserted a literal
                # "(unknown)" suffix, which could never match a real file.
                "url": f"http://localhost:8000/media/export_files/{filename}",
            }
            file_path = tmpdir.join(settings.EXPORT_FILES_DIRECTORY, filename)
            assert file_path.isfile()
            # "atest" sorts before "test"; both match the "contains test" filter.
            expected = (
                "\ufeff"
                "id,text_field,option_field,date_field\n"
                "2,atest,A,02/01/2020 01:23\n"
                "1,test,B,02/01/2020 01:23\n"
            )
            with open(file_path, "r", encoding="utf-8") as written_file:
                assert written_file.read() == expected
@pytest.mark.django_db
def test_exporting_csv_table_writes_file_to_storage(
    data_fixture, api_client, tmpdir, settings
):
    """End-to-end CSV export of a whole table (``view_id`` is ``None``).

    Same fixtures as the grid-view export test, but because no view is
    supplied the view's filter and sort must NOT be applied: both rows appear
    in insertion (id) order in the written file.
    """
    user, token = data_fixture.create_user_and_token()
    table = data_fixture.create_database_table(user=user)
    text_field = data_fixture.create_text_field(table=table, name="text_field", order=0)
    option_field = data_fixture.create_single_select_field(
        table=table, name="option_field", order=1
    )
    option_a = data_fixture.create_select_option(
        field=option_field, value="A", color="blue"
    )
    option_b = data_fixture.create_select_option(
        field=option_field, value="B", color="red"
    )
    date_field = data_fixture.create_date_field(
        table=table,
        date_include_time=True,
        date_format="US",
        name="date_field",
        order=2,
    )
    grid_view = data_fixture.create_grid_view(table=table)
    data_fixture.create_view_filter(
        view=grid_view, field=text_field, type="contains", value="test"
    )
    data_fixture.create_view_sort(view=grid_view, field=text_field, order="ASC")
    row_handler = RowHandler()
    row_handler.create_row(
        user=user,
        table=table,
        values={
            text_field.id: "test",
            date_field.id: "2020-02-01 01:23",
            option_field.id: option_b.id,
        },
    )
    row_handler.create_row(
        user=user,
        table=table,
        values={
            text_field.id: "atest",
            date_field.id: "2020-02-01 01:23",
            option_field.id: option_a.id,
        },
    )
    # Redirect the export handler's storage into the pytest tmpdir.
    storage = FileSystemStorage(location=(str(tmpdir)), base_url="http://localhost")
    with patch("baserow.contrib.database.export.handler.default_storage", new=storage):
        run_time = make_aware(parse_datetime("2020-02-01 01:00"), timezone=utc)
        # DRF uses some custom internal date time formatting, use the field itself
        # so the test doesn't break if we set a different default timezone format etc
        expected_created_at = DateTimeField().to_representation(run_time)
        with freeze_time(run_time):
            # The export task is queued on transaction commit; capture and run it.
            with capture_on_commit_callbacks(execute=True):
                response = api_client.post(
                    reverse(
                        "api:database:export:export_table",
                        kwargs={"table_id": table.id},
                    ),
                    data={
                        "view_id": None,
                        "exporter_type": "csv",
                        "export_charset": "utf-8",
                        "csv_include_header": "True",
                        "csv_column_separator": ",",
                    },
                    format="json",
                    HTTP_AUTHORIZATION=f"JWT {token}",
                )
            response_json = response.json()
            job_id = response_json["id"]
            assert response_json == {
                "id": job_id,
                "created_at": expected_created_at,
                "exported_file_name": None,
                "exporter_type": "csv",
                "progress_percentage": 0.0,
                "status": "pending",
                "table": table.id,
                "view": None,
                "url": None,
            }
            response = api_client.get(
                reverse("api:database:export:get", kwargs={"job_id": job_id}),
                format="json",
                HTTP_AUTHORIZATION=f"JWT {token}",
            )
            json = response.json()
            filename = json["exported_file_name"]
            assert json == {
                "id": job_id,
                "created_at": expected_created_at,
                "exported_file_name": filename,
                "exporter_type": "csv",
                "progress_percentage": 1.0,
                "status": "complete",
                "table": table.id,
                "view": None,
                # BUGFIX: interpolate the actual exported file name — the
                # previous f-string had no placeholder and asserted a literal
                # "(unknown)" suffix, which could never match a real file.
                "url": f"http://localhost:8000/media/export_files/{filename}",
            }
            file_path = tmpdir.join(settings.EXPORT_FILES_DIRECTORY, filename)
            assert file_path.isfile()
            # No view supplied, so the grid view's filter/sort are ignored and
            # rows come out in id order.
            expected = (
                "\ufeff"
                "id,text_field,option_field,date_field\n"
                "1,test,B,02/01/2020 01:23\n"
                "2,atest,A,02/01/2020 01:23\n"
            )
            with open(file_path, "r", encoding="utf-8") as written_file:
                assert written_file.read() == expected
| 36.881068
| 88
| 0.595986
| 1,740
| 15,195
| 4.881034
| 0.113218
| 0.060874
| 0.076063
| 0.03391
| 0.925704
| 0.918757
| 0.913929
| 0.909926
| 0.888261
| 0.888261
| 0
| 0.01905
| 0.291807
| 15,195
| 411
| 89
| 36.970803
| 0.770189
| 0.019546
| 0
| 0.792553
| 0
| 0
| 0.177466
| 0.051568
| 0
| 0
| 0
| 0
| 0.06117
| 1
| 0.023936
| false
| 0
| 0.029255
| 0
| 0.053191
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d4912dc7e5ea6a960cb48402034ff1a21b2d9d04
| 9,042
|
py
|
Python
|
train_attention.py
|
ivandrej/checkmatte
|
48992e9eb777b609eab32e58d918f1fb41ca51f1
|
[
"MIT"
] | null | null | null |
train_attention.py
|
ivandrej/checkmatte
|
48992e9eb777b609eab32e58d918f1fb41ca51f1
|
[
"MIT"
] | null | null | null |
train_attention.py
|
ivandrej/checkmatte
|
48992e9eb777b609eab32e58d918f1fb41ca51f1
|
[
"MIT"
] | null | null | null |
"""
Example usage:
To train a resolution 227 x 128 F3 model:
python train_attention.py --model-variant mobilenetv3 --dataset videomatte --resolution-lr 128 --seq-length-lr 15
--learning-rate-backbone 0.0001 --learning-rate-aspp 0.0002 --learning-rate-decoder 0.0002 --learning-rate-refiner 0
--checkpoint-dir checkpoint/stage1 --log-dir log/stage1 --epoch-start 0 --epoch-end 40
--log-train-images-interval 1000 --checkpoint-save-interval 2000 --batch-size-per-gpu 4 --temporal_offset 10
--model-type f3 --num-workers 8
To train a resolution 512 x 288 F4 (reduced) model:
python train_attention.py --model-variant mobilenetv3reduced --dataset videomatte --resolution-lr 288 \
--seq-length-lr 15 --learning-rate-backbone 0.0001 --learning-rate-aspp 0.0002 --learning-rate-decoder 0.0002 \
--learning-rate-refiner 0 --checkpoint-dir checkpoint/stage1 --log-dir log/stage1 --epoch-start 0 --epoch-end 100 \
--log-train-images-interval 3000 --checkpoint-save-interval 2000 --batch-size-per-gpu 4 --num-workers 8 \
--temporal_offset 10 --model-type f4 --disable-mixed-precision
"""
import torch
from torch import multiprocessing as mp
from base_attention_trainer import AbstractAttentionTrainer
from model import model_attention_after_aspp, model_attention_concat, model_attention_f3, model_attention_f3_f2, \
model_attention_f4, model_attention_f4_noaspp
class AttentionTrainer(AbstractAttentionTrainer):
    """Trainer that selects an attention matting network by ``--model-type``.

    The original implementation copy-pasted a nearly identical parameter/lr
    list for every model type; here the per-type differences are expressed
    once so adding a learning-rate group cannot silently diverge between
    branches.
    """

    # model_type -> module whose MattingNetwork should be instantiated.
    # Any other value (i.e. 'f2_f3') falls back to model_attention_f3_f2.
    _MODEL_MODULES = {
        'after_aspp': model_attention_after_aspp,
        'concat': model_attention_concat,
        'f3': model_attention_f3,
        'f4': model_attention_f4,
        'f4_noaspp': model_attention_f4_noaspp,
    }

    def init_network(self):
        """Build ``self.model`` on this rank and ``self.param_lrs``.

        ``self.param_lrs`` is a list of optimizer parameter groups, each a
        ``{'params': ..., 'lr': ...}`` dict, in the same order the original
        per-type lists used.
        """
        args = self.args
        model_type = args.model_type
        module = self._MODEL_MODULES.get(model_type, model_attention_f3_f2)
        self.model = module.MattingNetwork(
            args.model_variant,
            pretrained_backbone=True,
            pretrained_on_rvm=args.pretrained_on_rvm,
        ).to(self.rank)

        backbone_lr = args.learning_rate_backbone
        aspp_lr = args.learning_rate_aspp

        def group(params, lr):
            # One optimizer parameter group.
            return {'params': params, 'lr': lr}

        param_lrs = [group(self.model.backbone_bgr.parameters(), backbone_lr)]
        if model_type in ('after_aspp', 'concat'):
            param_lrs.append(group(self.model.aspp_bgr.parameters(), aspp_lr))
        if model_type == 'concat':
            param_lrs.append(group(self.model.project_concat.parameters(), aspp_lr))
        param_lrs.append(group(self.model.decoder.parameters(), args.learning_rate_decoder))
        param_lrs.append(group(self.model.refiner.parameters(), args.learning_rate_refiner))
        if model_type in self._MODEL_MODULES:
            # Single spatial attention module.
            param_lrs.append(group(self.model.spatial_attention.parameters(), backbone_lr))
        else:
            # f2_f3: spatial_attention is a mapping of per-level modules which
            # are not moved to the device by MattingNetwork.to() above.
            for att_module in self.model.spatial_attention.values():
                att_module.to(self.rank)
            param_lrs.append(group(self.model.spatial_attention['f3'].parameters(), backbone_lr))
            param_lrs.append(group(self.model.spatial_attention['f2'].parameters(), backbone_lr))
        param_lrs.append(group(self.model.backbone.parameters(), backbone_lr))
        if model_type != 'f4_noaspp':
            # f4_noaspp has no ASPP branch (group was commented out originally).
            param_lrs.append(group(self.model.aspp.parameters(), aspp_lr))
        self.param_lrs = param_lrs

    def custom_args(self, parser):
        """Register trainer-specific command line options."""
        parser.add_argument('--model-type', type=str,
                            choices=['after_aspp', 'concat', 'f4', 'f4_noaspp', 'f3', 'f2_f3'])
if __name__ == '__main__':
    # Launch one trainer process per visible CUDA device.
    num_gpus = torch.cuda.device_count()
    mp.spawn(
        AttentionTrainer,
        nprocs=num_gpus,
        args=(num_gpus,),
        join=True,
    )
| 71.19685
| 124
| 0.561823
| 925
| 9,042
| 5.273514
| 0.131892
| 0.093481
| 0.123001
| 0.164002
| 0.807298
| 0.773268
| 0.773268
| 0.757278
| 0.757278
| 0.740057
| 0
| 0.018672
| 0.318845
| 9,042
| 126
| 125
| 71.761905
| 0.77334
| 0.133709
| 0
| 0.631579
| 0
| 0
| 0.051279
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.021053
| false
| 0
| 0.042105
| 0
| 0.073684
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d4d4f54baf5b4398a962219168068042562e28c9
| 2,399
|
py
|
Python
|
azcam_osu4k/detector_sta0500_osu4k.py
|
mplesser/azcam-osu4k
|
09ffcd06b1b5782a6ace6d4043a09e4bda0e735c
|
[
"MIT"
] | null | null | null |
azcam_osu4k/detector_sta0500_osu4k.py
|
mplesser/azcam-osu4k
|
09ffcd06b1b5782a6ace6d4043a09e4bda0e735c
|
[
"MIT"
] | null | null | null |
azcam_osu4k/detector_sta0500_osu4k.py
|
mplesser/azcam-osu4k
|
09ffcd06b1b5782a6ace6d4043a09e4bda0e735c
|
[
"MIT"
] | null | null | null |
# STA0500 CCD read out through four amplifiers arranged 2 x 2
# (focalplane [1, 1, 2, 2, "0000"]); one FITS extension per amplifier.
detector_sta0500 = {
    "name": "STA0500",
    "description": "STA STA0500 CCD",
    # Reference pixel [x, y] of the focal plane (roughly the array centre).
    "ref_pixel": [2032, 2032],
    # NOTE(review): field meanings follow the azcam detector-format
    # convention (active size, prescan/overscan, ...) — confirm against the
    # azcam documentation before editing.
    "format": [4064, 3, 0, 20, 4064, 0, 0, 0, 0],
    "focalplane": [1, 1, 2, 2, "0000"],
    "amp_cfg": [0, 0, 0, 0],
    # Full-frame region of interest, unbinned.
    "roi": [1, 4064, 1, 4064, 1, 1],
    "ext_position": [[1, 1], [1, 2], [2, 1], [2, 2]],
    "jpg_order": [1, 2, 3, 4],
    "ext_name": ["im3", "im1", "im4", "im2"],
    "ext_number": [3, 1, 4, 2],
    "det_number": [1, 1, 1, 1],
    "det_position": [[1, 1], [1, 1], [1, 1], [1, 1]],
    "det_gap": [[0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0]],
    "amp_position": [[1, 1], [1, 2], [2, 1], [2, 2]],
    "amp_pixel_position": [[1, 1], [1, 1], [1, 1], [1, 1]],
}
# Same STA0500 device read out through a single amplifier
# (focalplane [1, 1, 1, 1, "0"]); one FITS extension.
detector_sta0500_1amp = {
    "name": "STA0500",
    "description": "STA STA0500 CCD",
    "ref_pixel": [2032, 2032],
    "format": [4064, 3, 0, 20, 4064, 0, 0, 0, 0],
    "focalplane": [1, 1, 1, 1, "0"],
    "amp_cfg": [0],
    "roi": [1, 4064, 1, 4064, 1, 1],
    "ext_position": [[1, 1]],
    "jpg_order": [1],
    "ext_name": ["im1"],
    "ext_number": [1],
    "det_number": [1],
    "det_position": [[1, 1]],
    "det_gap": [[0.0, 0.0]],
    "amp_position": [[1, 1]],
    "amp_pixel_position": [[1, 1]],
}
# Two-amplifier readout; focalplane [1, 1, 1, 2, "01"] — presumably two
# parallel amplifiers on the right side, as the variable name suggests.
detector_sta0500_2paramps_right = {
    "name": "STA0500",
    "description": "STA STA0500 CCD",
    "ref_pixel": [2032, 2032],
    "format": [4064, 3, 0, 20, 4064, 0, 0, 0, 0],
    "focalplane": [1, 1, 1, 2, "01"],
    "amp_cfg": [0, 1],
    "roi": [1, 4064, 1, 4064, 1, 1],
    # Extensions stacked vertically: [1, 1] and [1, 2].
    "ext_position": [[1, 1], [1, 2]],
    "jpg_order": [1, 2],
    "ext_name": ["im1", "im2"],
    "ext_number": [1, 2],
    "det_number": [1, 1],
    "det_position": [[1, 1], [1, 1]],
    "det_gap": [[0.0, 0.0], [0.0, 0.0]],
    "amp_position": [[1, 1], [1, 2]],
    "amp_pixel_position": [[1, 1], [1, 1]],
}
# Two-amplifier readout; focalplane [1, 1, 2, 1, "01"] — presumably two
# serial amplifiers side by side, as the variable name suggests.
detector_sta0500_2seramps = {
    "name": "STA0500",
    "description": "STA STA0500 CCD",
    "ref_pixel": [2032, 2032],
    "format": [4064, 3, 0, 20, 4064, 0, 0, 0, 0], # overscan was 50
    "focalplane": [1, 1, 2, 1, "01"],
    "amp_cfg": [0, 1],
    "roi": [1, 4064, 1, 4064, 1, 1],
    # Extensions side by side: [1, 1] and [2, 1].
    "ext_position": [[1, 1], [2, 1]],
    "jpg_order": [1, 2],
    "ext_name": ["im1", "im2"],
    "ext_number": [1, 2],
    "det_number": [1, 1],
    "det_position": [[1, 1], [1, 1]],
    "det_gap": [[0.0, 0.0], [0.0, 0.0]],
    "amp_position": [[1, 1], [2, 1]],
    "amp_pixel_position": [[1, 1], [1, 1]],
}
| 31.565789
| 68
| 0.466028
| 384
| 2,399
| 2.765625
| 0.106771
| 0.105461
| 0.107345
| 0.109228
| 0.833333
| 0.798493
| 0.752354
| 0.733522
| 0.678908
| 0.645009
| 0
| 0.206498
| 0.243018
| 2,399
| 75
| 69
| 31.986667
| 0.378304
| 0.006253
| 0
| 0.5
| 0
| 0
| 0.300588
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d4f4c4574b9f50c8e1e94da2f3207ec87f868649
| 104
|
py
|
Python
|
tfcoreml/__init__.py
|
drush-apple/tf-coreml
|
1da1d0f45fca018c08119fd2e6e8e7583e0c7243
|
[
"Apache-2.0"
] | 481
|
2018-04-15T15:32:10.000Z
|
2021-11-12T13:47:47.000Z
|
tfcoreml/__init__.py
|
drush-apple/tf-coreml
|
1da1d0f45fca018c08119fd2e6e8e7583e0c7243
|
[
"Apache-2.0"
] | 7
|
2018-06-14T03:14:22.000Z
|
2021-11-11T18:46:52.000Z
|
tfcoreml/__init__.py
|
drush-apple/tf-coreml
|
1da1d0f45fca018c08119fd2e6e8e7583e0c7243
|
[
"Apache-2.0"
] | 81
|
2018-06-02T01:36:22.000Z
|
2021-05-21T02:03:43.000Z
|
from ._tf_coreml_converter import convert
from .optimizations._optimize_nn_spec import optimize_nn_spec
| 34.666667
| 61
| 0.894231
| 15
| 104
| 5.666667
| 0.666667
| 0.235294
| 0.329412
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 104
| 2
| 62
| 52
| 0.885417
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
be08e61235ec1e6de61a8a30fb497932ac728f03
| 4,631
|
py
|
Python
|
bdd_tests/mocks/insights-content-service/content_server.py
|
RedHatInsights/ccx-notification-service
|
2f7702fb3124228b043f826fa4835cd3db0c2111
|
[
"Apache-2.0"
] | 1
|
2021-04-19T10:42:26.000Z
|
2021-04-19T10:42:26.000Z
|
bdd_tests/mocks/insights-content-service/content_server.py
|
RedHatInsights/ccx-notification-service
|
2f7702fb3124228b043f826fa4835cd3db0c2111
|
[
"Apache-2.0"
] | 51
|
2021-04-12T07:39:10.000Z
|
2022-03-08T14:15:44.000Z
|
bdd_tests/mocks/insights-content-service/content_server.py
|
RedHatInsights/ccx-notification-service
|
2f7702fb3124228b043f826fa4835cd3db0c2111
|
[
"Apache-2.0"
] | 7
|
2021-04-02T06:21:59.000Z
|
2022-03-23T16:29:32.000Z
|
import json
from fastapi import FastAPI, Response
import pygob
app = FastAPI()
"""
This is the data and format returned
{
Config: {
Impact: map [
Data Loss:4
Hung Task:3
Application Failure:2
Hardening:1
]
}
Rules: map[
test_rule: {
Plugin: {
Name:Rule for testing purposes
NodeID:
ProductCode: OCP4
PythonModule: ccx_rules_ocm.test_rule
}
ErrorKeys: map[
TEST_RULE_CRITICAL_IMPACT: {
Metadata: {
Description:A test rule for E2E tests that depend on this service
Impact:{Name:Data Loss Impact:4}
Likelihood:4
PublishDate:2020-04-08 00:42:00
Status:active
Tags:[openshift service_availability]
}
TotalRisk:0
Generic:Generic description of the critical impact test rule.\n
Summary:This is an example recommendation used for testing purposes. IT SHOULD NOT BE SERVED IN ANY ENVIRONMENT THAT IS NOT FOR TESTING.\n
Resolution:some resolution\n
MoreInfo:For more info about the Remote Health Monitoring, refer to [documentation](https://docs.openshift.com/container-platform/4.3/support/remote_health_monitoring/about-remote-health-monitoring.html)\n
Reason:some reason\n
HasReason:true
}
TEST_RULE_IMPORTANT_IMPACT: {Metadata:{Description:A test rule for E2E tests that depend on this service Impact:{Name:Hung Task Impact:3} Likelihood:3 PublishDate:2020-04-08 00:42:00 Status:active Tags:[openshift service_availability]} TotalRisk:0 Generic:Generic description of the important impact test rule.\n Summary:This is an example recommendation used for testing purposes. IT SHOULD NOT BE SERVED IN ANY ENVIRONMENT THAT IS NOT FOR TESTING.\n Resolution:some resolution\n MoreInfo:For more info about the Remote Health Monitoring, refer to [documentation](https://docs.openshift.com/container-platform/4.3/support/remote_health_monitoring/about-remote-health-monitoring.html)\n Reason:some reason\n HasReason:true}
TEST_RULE_LOW_IMPACT:{Metadata:{Description:A test rule for E2E tests that depend on this service Impact:{Name:Hardening Impact:1} Likelihood:1 PublishDate:2020-04-08 00:42:00 Status:active Tags:[openshift service_availability]} TotalRisk:0 Generic:Generic description of the low impact test rule.\n Summary:This is an example recommendation used for testing purposes. IT SHOULD NOT BE SERVED IN ANY ENVIRONMENT THAT IS NOT FOR TESTING.\n Resolution:some resolution\n MoreInfo:For more info about the Remote Health Monitoring, refer to [documentation](https://docs.openshift.com/container-platform/4.3/support/remote_health_monitoring/about-remote-health-monitoring.html)\n Reason:some reason\n HasReason:true}
TEST_RULE_MODERATE_IMPACT:{Metadata:{Description:A test rule for E2E tests that depend on this service Impact:{Name:Application Failure Impact:2} Likelihood:2 PublishDate:2020-04-08 00:42:00 Status:active Tags:[openshift service_availability]} TotalRisk:0 Generic:Generic description of the moderate impact test rule.\n Summary:This is an example recommendation used for testing purposes. IT SHOULD NOT BE SERVED IN ANY ENVIRONMENT THAT IS NOT FOR TESTING.\n Resolution:some resolution\n MoreInfo:For more info about the Remote Health Monitoring, refer to [documentation](https://docs.openshift.com/container-platform/4.3/support/remote_health_monitoring/about-remote-health-monitoring.html)\n Reason:some reason\n HasReason:true}
]
Generic: Summary:This is an example recommendation used for testing purposes. IT SHOULD NOT BE SERVED IN ANY ENVIRONMENT THAT IS NOT FOR TESTING.\n
Resolution:some resolution\n
MoreInfo:For more info about the Remote Health Monitoring, refer to [documentation](https://docs.openshift.com/container-platform/4.3/support/remote_health_monitoring/about-remote-health-monitoring.html)\n
Reason:some reason\n
HasReason:true
}
]
}
"""
@app.get("/api/v1/openapi.json")
def read_test_content():
    """Serve a minimal stub body for the openapi.json endpoint."""
    stub = {"openapi.json": "ok"}
    return stub
@app.get("/api/v1/content")
def read_test_content_gob():
    """Serve the pre-recorded Gob-encoded rule content as raw bytes.

    Renamed from ``read_test_content``: the original redefined the handler
    for ``/api/v1/openapi.json`` at module level (shadowing it in the module
    namespace). The route path and response are unchanged.

    Reads ``test_content_gob`` from the mock server's working directory.
    """
    with open('test_content_gob', 'rb') as test_content_gob:
        return Response(test_content_gob.read())
| 67.115942
| 746
| 0.687109
| 612
| 4,631
| 5.133987
| 0.197712
| 0.057288
| 0.105029
| 0.02387
| 0.802673
| 0.802673
| 0.802673
| 0.802673
| 0.802673
| 0.802673
| 0
| 0.025198
| 0.237314
| 4,631
| 68
| 747
| 68.102941
| 0.864383
| 0
| 0
| 0.181818
| 0
| 0
| 0.196481
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.181818
| false
| 0
| 0.272727
| 0.090909
| 0.636364
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
076dc33958a93b35cfc3119b1f237767cfd4a79a
| 5,405
|
py
|
Python
|
python/dynamic_graph/sot/torque_control/talos/joint_pos_ctrl_gains_sim.py
|
Hugo-L3174/talos-torque-control
|
14faafcc06c93b57c972e92c1684b006667ff32e
|
[
"BSD-2-Clause"
] | 3
|
2020-06-12T15:36:32.000Z
|
2021-04-07T20:12:11.000Z
|
python/dynamic_graph/sot/torque_control/talos/joint_pos_ctrl_gains_sim.py
|
Hugo-L3174/talos-torque-control
|
14faafcc06c93b57c972e92c1684b006667ff32e
|
[
"BSD-2-Clause"
] | 5
|
2018-07-20T08:22:37.000Z
|
2020-07-31T14:41:42.000Z
|
python/dynamic_graph/sot/torque_control/talos/joint_pos_ctrl_gains_sim.py
|
Hugo-L3174/talos-torque-control
|
14faafcc06c93b57c972e92c1684b006667ff32e
|
[
"BSD-2-Clause"
] | 4
|
2018-07-02T12:34:53.000Z
|
2020-05-25T12:46:28.000Z
|
# -*- coding: utf-8 -*-
"""
Created on Mon Feb 9 13:55:16 2015
Position control PID gains stored according to the controller time step (1, 2, 5 or 10 ms).
@author: adelpret
"""
from joint_pos_ctrl_gains import *
key = round(0.001,3);
# kp_pos[key] = np.array(
# (1000., 1000., 1000., 1300., 750., 750.,
# 1000., 1000., 1000., 1300., 750., 750.,
# 4000., 4000.,
# 200.0, 1000., 500., 500., 500., 300., 300.,
# 0.2,
# 200.0, 1000., 500., 500., 500., 300., 300.,
# 0.2,
# 10.,10.));
# (1000, 5000, 1000, 1000, 500, 500,
# 1000, 5000, 1000, 1000, 500, 500,
# 10000, 10000,
# 500, 500, 500, 500, 100, 100, 100,
# 50,
# 500, 500, 500, 500, 100, 100, 100,
# 50,
# 10, 10)); # proportional gain of postural task
kp_pos[key] = np.array(
(5000., 5000., 5000., 5000., 5000., 5000.,
5000., 5000., 5000., 5000., 5000., 5000.,
10000., 10000.,
10000.0, 20000., 5000., 5000., 500., 500., 500.,
1000.0,
10000., 20000., 5000., 5000., 500., 500., 500.,
1000.0,
300.0, 300.0));
kd_pos[key] = np.array(
(20., 20., 20., 20., 20., 20.,
20., 20., 20., 20., 20., 20.,
10., 10.,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0,
0.1, 0.1));
ki_pos[key] = np.array(
(5., 5., 5., 5., 5., 5.,
5., 5., 5., 5., 5., 5.,
1., 1.,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0,
1.0, 1.0));
key = round(0.002,3);
kp_pos[key] = kp_pos[round(0.001,3)];
kd_pos[key] = kd_pos[round(0.001,3)];
ki_pos[key] = ki_pos[round(0.001,3)];
key = round(0.003,3);
kp_pos[key] = kp_pos[round(0.001,3)];
kd_pos[key] = kd_pos[round(0.001,3)];
ki_pos[key] = ki_pos[round(0.001,3)];
key = round(0.005,3);
kp_pos[key] = kp_pos[round(0.001,3)];
kd_pos[key] = kd_pos[round(0.001,3)];
ki_pos[key] = ki_pos[round(0.001,3)];
key = round(0.010,3);
kp_pos[key] = kp_pos[round(0.001,3)];
kd_pos[key] = kd_pos[round(0.001,3)];
ki_pos[key] = ki_pos[round(0.001,3)];
'''kp_pos[key] = 20*np.array(
(500., 500., 500., 500., 800., 800.,
500., 500., 500., 500., 800., 800.,
500., 500.,
100., 100., 100., 100., 100., 100., 100.,
10.0,
100., 100., 100., 100., 100., 100., 100.,
10.0,
100.0,100.0));
kd_pos[key] = .0*np.array(
(17.96, 103.4, 83.35, 154.36, 29.08, 22.44,
17.96, 103.4, 83.35, 154.36, 29.08, 22.44,
48.4, 78.4,
32.35, 55.86, 10.83, 87.05, 13.87, 11.19, 6.93,
1.0,
32.35, 55.86, 10.83, 87.05, 13.87, 11.19, 6.93,
1.0,
1.0,1.0));
ki_pos[key] = np.zeros(NJ);'''
'''key = round(0.002,3);
kp_pos[key] = np.array(
(450, 3231.25, 1736.5, 3858.75, 727, 561,
450, 3231.25, 1736.5, 3858.75, 727, 561,
1210, 1960,
808.75, 1396.5, 357.25, 2176.25, 346.5, 373, 173.25,
1.0,
808.75, 1396.5, 357.25, 2176.25, 346.5, 373, 173.25,
1.0,
1.0,1.0));
kd_pos[key] = np.array(
(8.98, 51.7, 41.675, 77.18, 14.54, 11.22,
8.98, 51.7, 41.675, 77.18, 14.54, 11.22,
24.2, 39.2,
16.175, 27.93, 5.415, 43.525, 6.935, 5.595, 3.465,
1.0,
16.175, 27.93, 5.415, 43.525, 6.935, 5.595, 3.465,
1.0,
1.0,1.0));
ki_pos[key] = ki_pos[round(0.001,3)];
key = round(0.005,3);
kp_pos[key] = np.array(
(288, 2068, 1111.36, 2469.6, 465.28, 359.04,
288, 2068, 1111.36, 2469.6, 465.28, 359.04,
317.194, 513.802,
212.009, 366.084, 93.6512, 570.491, 90.8326, 97.7798, 45.4164,
1.0,
212.009, 366.084, 93.6512, 570.491, 90.8326, 97.7798, 45.4164,
1.0,
1.0,1.0));
kd_pos[key] = np.array(
(7.184, 41.36, 33.34, 61.744, 11.632, 8.976,
7.184, 41.36, 33.34, 61.744, 11.632, 8.976,
12.3904, 20.0704,
8.2816, 14.3002, 2.77248, 22.2848, 3.55072, 2.86464, 1.77408,
1.0,
8.2816, 14.3002, 2.77248, 22.2848, 3.55072, 2.86464, 1.77408,
1.0,
1.0,1.0));
ki_pos[key] = ki_pos[round(0.001,3)];
key = round(0.010,3);
kp_pos[key] = np.array(
(184.32, 1323.52, 711.27, 1580.54, 850.247, 650.338,
184.32, 1323.52, 711.27, 1580.54, 850.247, 650.338,
203.004, 328.833,
86.839, 149.948, 38.3596, 233.673, 23.8113, 25.6324, 11.9057,
1.0,
86.839, 149.948, 38.3596, 233.673, 23.8113, 25.6324, 11.9057,
1.0,
1.0,1.0));
kd_pos[key] = np.array(
(5.7472, 33.088, 26.672, 49.3952, 19.0973, 16.335,
5.7472, 33.088, 26.672, 49.3952, 19.0973, 16.335,
9.91232, 16.0563,
5.30022, 9.15216, 1.77438, 14.2622, 1.81797, 1.4667, 0.908328,
1.0,
5.30022, 9.15216, 1.77438, 14.2622, 1.81797, 1.4667, 0.908328,
1.0,
1.0,1.0));
ki_pos[key] = ki_pos[round(0.001,3)];'''
| 33.78125
| 91
| 0.453099
| 960
| 5,405
| 2.503125
| 0.21875
| 0.052434
| 0.076155
| 0.098211
| 0.853516
| 0.841448
| 0.786517
| 0.775697
| 0.702871
| 0.676238
| 0
| 0.475952
| 0.334505
| 5,405
| 159
| 92
| 33.993711
| 0.192105
| 0.124514
| 0
| 0.488889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.022222
| 0
| 0.022222
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
07741823732aee3c0434588f527ecfb14f48c47b
| 80
|
py
|
Python
|
bfgame/factories/__init__.py
|
ChrisLR/BasicDungeonRL
|
b293d40bd9a0d3b7aec41b5e1d58441165997ff1
|
[
"MIT"
] | 3
|
2017-10-28T11:28:38.000Z
|
2018-09-12T09:47:00.000Z
|
bfgame/factories/__init__.py
|
ChrisLR/BasicDungeonRL
|
b293d40bd9a0d3b7aec41b5e1d58441165997ff1
|
[
"MIT"
] | null | null | null |
bfgame/factories/__init__.py
|
ChrisLR/BasicDungeonRL
|
b293d40bd9a0d3b7aec41b5e1d58441165997ff1
|
[
"MIT"
] | null | null | null |
from bfgame.factories import recipes
from bfgame.factories.facade import Facade
| 26.666667
| 42
| 0.8625
| 11
| 80
| 6.272727
| 0.545455
| 0.289855
| 0.550725
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 80
| 2
| 43
| 40
| 0.958333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ed16aa8d5371202b0cb5e8bd15bcc2377240f82c
| 10,130
|
py
|
Python
|
system_node_only/indy-node-tests/TestAuditSuite.py
|
udosson/indy-test-automation
|
e79f4b6a88ff8c6181c758be52475c7cbf79ee74
|
[
"Apache-2.0"
] | null | null | null |
system_node_only/indy-node-tests/TestAuditSuite.py
|
udosson/indy-test-automation
|
e79f4b6a88ff8c6181c758be52475c7cbf79ee74
|
[
"Apache-2.0"
] | null | null | null |
system_node_only/indy-node-tests/TestAuditSuite.py
|
udosson/indy-test-automation
|
e79f4b6a88ff8c6181c758be52475c7cbf79ee74
|
[
"Apache-2.0"
] | null | null | null |
import pytest
import asyncio
from system.utils import *
import logging
logger = logging.getLogger(__name__)
@pytest.mark.usefixtures('docker_setup_and_teardown')
class TestAuditSuite:
@pytest.mark.asyncio
async def test_case_restart_one_node(
self, pool_handler, wallet_handler, get_default_trustee, nodes_num, check_no_failures_fixture
):
trustee_did, _ = get_default_trustee
test_nodes = [NodeHost(i) for i in range(1, 8)]
await ensure_pool_performs_write_read(pool_handler, wallet_handler, trustee_did, nyms_count=5)
test_nodes[5].restart_service()
await ensure_pool_performs_write_read(pool_handler, wallet_handler, trustee_did, nyms_count=5)
await ensure_pool_is_in_sync(nodes_num=nodes_num)
primary1, alias, target_did = await get_primary(pool_handler, wallet_handler, trustee_did)
p1 = NodeHost(primary1)
p1.stop_service()
primary2 = await ensure_primary_changed(pool_handler, wallet_handler, trustee_did, primary1)
p2 = NodeHost(primary2)
await ensure_pool_performs_write_read(pool_handler, wallet_handler, trustee_did, nyms_count=5)
test_nodes[5].restart_service()
await ensure_pool_performs_write_read(pool_handler, wallet_handler, trustee_did, nyms_count=5)
p1.start_service()
p2.stop_service()
await ensure_primary_changed(pool_handler, wallet_handler, trustee_did, primary2)
test_nodes[5].stop_service()
p2.start_service()
await ensure_pool_performs_write_read(pool_handler, wallet_handler, trustee_did, nyms_count=5)
test_nodes[5].start_service()
await ensure_pool_performs_write_read(pool_handler, wallet_handler, trustee_did, nyms_count=5)
await ensure_pool_is_in_sync(nodes_num=nodes_num)
await ensure_pool_is_functional(pool_handler, wallet_handler, trustee_did)
@pytest.mark.parametrize('node_num_shift', [0, 1, 5])
@pytest.mark.asyncio
async def test_case_restart_master_backup_non_primary(
self, pool_handler, wallet_handler, get_default_trustee, node_num_shift, nodes_num,
check_no_failures_fixture
):
trustee_did, _ = get_default_trustee
primary1, alias, target_did = await get_primary(pool_handler, wallet_handler, trustee_did)
p1 = NodeHost(primary1)
p1.stop_service()
primary2 = await ensure_primary_changed(pool_handler, wallet_handler, trustee_did, primary1)
await ensure_pool_performs_write_read(pool_handler, wallet_handler, trustee_did, nyms_count=5)
p1.start_service()
next_node = NodeHost(int(primary2) + node_num_shift)
next_node.restart_service()
await ensure_pool_performs_write_read(pool_handler, wallet_handler, trustee_did, nyms_count=5)
await ensure_pool_is_in_sync(nodes_num=nodes_num)
await ensure_pool_is_functional(pool_handler, wallet_handler, trustee_did)
@pytest.mark.asyncio
async def test_case_restart_all_nodes_at_the_same_time(
self, pool_handler, wallet_handler, get_default_trustee, nodes_num, check_no_failures_fixture
):
trustee_did, _ = get_default_trustee
test_nodes = [NodeHost(i) for i in range(1, 8)]
logger.info("1: Initiating a view change by stopping master primary")
primary1, alias, target_did = await get_primary(pool_handler, wallet_handler, trustee_did)
p1 = NodeHost(primary1)
p1.stop_service()
logger.info("2: Ensure that primary has been changed")
primary2 = await ensure_primary_changed(pool_handler, wallet_handler, trustee_did, primary1)
logger.info("3: Ensure pool works")
await check_pool_is_functional(pool_handler, wallet_handler, trustee_did, nyms_count=15)
p1.start_service()
logger.info("4: Restarting the pool")
restart_pool(test_nodes)
logger.info("5: Ensure pool is in sync")
await ensure_pool_is_in_sync(nodes_num=nodes_num)
logger.info("6: Ensure that primary has not been changed")
primary_after_restart, _, _ = await get_primary(pool_handler, wallet_handler, trustee_did)
assert primary_after_restart == primary2
logger.info("7: Ensure pool works")
await ensure_pool_is_functional(
pool_handler, wallet_handler, trustee_did, nyms_count=30
)
@pytest.mark.asyncio
async def test_case_restart_f_nodes(
self, pool_handler, wallet_handler, get_default_trustee, nodes_num, check_no_failures_fixture
):
trustee_did, _ = get_default_trustee
test_nodes = [NodeHost(i) for i in range(1, 8)]
primary1, alias, target_did = await get_primary(pool_handler, wallet_handler, trustee_did)
p1 = NodeHost(primary1)
p1.stop_service()
await ensure_primary_changed(pool_handler, wallet_handler, trustee_did, primary1)
await ensure_pool_performs_write_read(pool_handler, wallet_handler, trustee_did, nyms_count=5)
p1.start_service()
for node in test_nodes[5:]:
node.restart_service()
await ensure_pool_is_functional(pool_handler, wallet_handler, trustee_did)
await ensure_pool_performs_write_read(pool_handler, wallet_handler, trustee_did, nyms_count=5)
await ensure_pool_is_in_sync(nodes_num=nodes_num)
await ensure_pool_is_functional(pool_handler, wallet_handler, trustee_did)
@pytest.mark.asyncio
async def test_case_restart_n_minus_f_minus_one_nodes(
self, pool_handler, wallet_handler, get_default_trustee, nodes_num, check_no_failures_fixture
):
trustee_did, _ = get_default_trustee
test_nodes = [NodeHost(i) for i in range(1, 8)]
primary1, alias, target_did = await get_primary(pool_handler, wallet_handler, trustee_did)
p1 = NodeHost(primary1)
p1.stop_service()
await ensure_primary_changed(pool_handler, wallet_handler, trustee_did, primary1)
await ensure_pool_performs_write_read(pool_handler, wallet_handler, trustee_did, nyms_count=5)
p1.start_service()
for node in test_nodes[3:]:
node.restart_service()
await ensure_pool_is_functional(pool_handler, wallet_handler, trustee_did)
await ensure_pool_performs_write_read(pool_handler, wallet_handler, trustee_did, nyms_count=5)
await ensure_pool_is_in_sync(nodes_num=nodes_num)
await ensure_pool_is_functional(pool_handler, wallet_handler, trustee_did)
@pytest.mark.asyncio
async def test_case_restart_all_nodes_one_by_one(
self, pool_handler, wallet_handler, get_default_trustee, nodes_num, check_no_failures_fixture
):
trustee_did, _ = get_default_trustee
test_nodes = [NodeHost(i) for i in range(1, 8)]
primary1, alias, target_did = await get_primary(pool_handler, wallet_handler, trustee_did)
p1 = NodeHost(primary1)
p1.stop_service()
await ensure_primary_changed(pool_handler, wallet_handler, trustee_did, primary1)
await ensure_pool_performs_write_read(pool_handler, wallet_handler, trustee_did, nyms_count=5)
p1.start_service()
for node in test_nodes:
node.restart_service()
# do not remove/change with eventually - it is sequential node stopping
await asyncio.sleep(10)
await ensure_pool_performs_write_read(pool_handler, wallet_handler, trustee_did, nyms_count=5)
await ensure_pool_is_in_sync(nodes_num=nodes_num)
await ensure_pool_is_functional(pool_handler, wallet_handler, trustee_did)
@pytest.mark.parametrize('node_num_shift', [0, 1, 5])
@pytest.mark.asyncio
async def test_case_demote_master_backup_non_primary(
self, pool_handler, wallet_handler, get_default_trustee, node_num_shift, nodes_num,
check_no_failures_fixture
):
trustee_did, _ = get_default_trustee
primary1, alias1, target_did1 = await get_primary(pool_handler, wallet_handler, trustee_did)
print('Primary at the beginning is {}'.format(primary1))
p1 = NodeHost(primary1)
p1.stop_service()
primary2 = await ensure_primary_changed(pool_handler, wallet_handler, trustee_did, primary1)
print('Primary after service stop is {}'.format(primary2))
await ensure_pool_performs_write_read(pool_handler, wallet_handler, trustee_did, nyms_count=5)
p1.start_service()
primary, _, _ = await get_primary(pool_handler, wallet_handler, trustee_did)
print('Primary after service start is {}'.format(primary))
# demote master primary / backup primary / non primary here
alias_for_demotion = 'Node{}'.format(int(primary2)+node_num_shift)
print(alias_for_demotion)
target_did_for_demotion = get_pool_info(primary2)[alias_for_demotion]
print(target_did_for_demotion)
primary, _, _ = await get_primary(pool_handler, wallet_handler, trustee_did)
print('Primary before demotion is {}'.format(primary))
await eventually(
demote_node, pool_handler, wallet_handler, trustee_did, alias_for_demotion, target_did_for_demotion
)
primary3 = await ensure_primary_changed(pool_handler, wallet_handler, trustee_did, primary2)
print('Primary after demotion is {}'.format(primary3))
await ensure_pool_performs_write_read(pool_handler, wallet_handler, trustee_did, nyms_count=5)
await eventually(
promote_node, pool_handler, wallet_handler, trustee_did, alias_for_demotion, target_did_for_demotion
)
primary4 = await ensure_primary_changed(pool_handler, wallet_handler, trustee_did, primary3)
print('Primary after promotion is {}'.format(primary4))
await ensure_pool_is_in_sync(nodes_num=nodes_num)
await ensure_pool_is_functional(pool_handler, wallet_handler, trustee_did)
| 53.315789
| 113
| 0.717374
| 1,317
| 10,130
| 5.090357
| 0.096431
| 0.090245
| 0.139469
| 0.196897
| 0.835024
| 0.825477
| 0.824881
| 0.819511
| 0.806533
| 0.799224
| 0
| 0.014518
| 0.211254
| 10,130
| 189
| 114
| 53.597884
| 0.824531
| 0.012537
| 0
| 0.662722
| 0
| 0
| 0.047197
| 0.002548
| 0
| 0
| 0
| 0
| 0.005917
| 1
| 0
| false
| 0
| 0.023669
| 0
| 0.029586
| 0.047337
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ed28f9fa00dcdc689c720461e192e51edb3c7b48
| 122
|
py
|
Python
|
src/python/fundamentals/regressions/problems/c.py
|
kevmo314/transformers
|
0370daeb5d1f9ef2fd98a5ed28d7b457befac517
|
[
"MIT"
] | 1
|
2020-08-02T19:28:25.000Z
|
2020-08-02T19:28:25.000Z
|
src/python/fundamentals/regressions/problems/c.py
|
kevmo314/transformers
|
0370daeb5d1f9ef2fd98a5ed28d7b457befac517
|
[
"MIT"
] | null | null | null |
src/python/fundamentals/regressions/problems/c.py
|
kevmo314/transformers
|
0370daeb5d1f9ef2fd98a5ed28d7b457befac517
|
[
"MIT"
] | null | null | null |
import numpy
def linear_regression(data: List[Tuple[float, float]]) -> Tuple[float, float]:
pass # Implement this.
| 20.333333
| 78
| 0.704918
| 16
| 122
| 5.3125
| 0.75
| 0.235294
| 0.352941
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.163934
| 122
| 5
| 79
| 24.4
| 0.833333
| 0.122951
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 8
|
ed31727504b27b116254797170bcadecfe95fee5
| 255,253
|
py
|
Python
|
var/assets/clients/gen-py/clause/Serving.py
|
gagaboy/clause
|
f98f0c551baead034736591cad30eda2c2ce895c
|
[
"Apache-2.0"
] | 344
|
2019-09-09T11:23:43.000Z
|
2022-03-25T17:36:33.000Z
|
var/assets/clients/gen-py/clause/Serving.py
|
huangweiboy/clause
|
7553deea9857a93b733e9771b3649741c0dc3cb9
|
[
"Apache-2.0"
] | 41
|
2019-09-05T02:16:46.000Z
|
2021-12-13T10:49:10.000Z
|
var/assets/clients/gen-py/clause/Serving.py
|
huangweiboy/clause
|
7553deea9857a93b733e9771b3649741c0dc3cb9
|
[
"Apache-2.0"
] | 112
|
2019-09-11T07:20:06.000Z
|
2022-03-17T07:03:30.000Z
|
#
# Autogenerated by Thrift Compiler (0.12.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#
from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
from thrift.TRecursive import fix_spec
import sys
import logging
from .ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
all_structs = []
class Iface(object):
"""
API
"""
def postCustomDict(self, request):
"""
词典管理
Parameters:
- request
"""
pass
def putCustomDict(self, request):
"""
Parameters:
- request
"""
pass
def getCustomDicts(self, request):
"""
Parameters:
- request
"""
pass
def getCustomDict(self, request):
"""
Parameters:
- request
"""
pass
def delCustomDict(self, request):
"""
Parameters:
- request
"""
pass
def postSysDict(self, request):
"""
Parameters:
- request
"""
pass
def putSysDict(self, request):
"""
Parameters:
- request
"""
pass
def getSysDicts(self, request):
"""
Parameters:
- request
"""
pass
def getSysDict(self, request):
"""
Parameters:
- request
"""
pass
def refSysDict(self, request):
"""
Parameters:
- request
"""
pass
def unrefSysDict(self, request):
"""
Parameters:
- request
"""
pass
def myDicts(self, request):
"""
Parameters:
- request
"""
pass
def mySysdicts(self, request):
"""
Parameters:
- request
"""
pass
def putDictWord(self, request):
"""
Parameters:
- request
"""
pass
def getDictWords(self, request):
"""
Parameters:
- request
"""
pass
def delDictWord(self, request):
"""
Parameters:
- request
"""
pass
def hasDictWord(self, request):
"""
Parameters:
- request
"""
pass
def postIntent(self, request):
"""
意图
Parameters:
- request
"""
pass
def putIntent(self, request):
"""
Parameters:
- request
"""
pass
def getIntents(self, request):
"""
Parameters:
- request
"""
pass
def getIntent(self, request):
"""
Parameters:
- request
"""
pass
def delIntent(self, request):
"""
Parameters:
- request
"""
pass
def postUtter(self, request):
"""
Parameters:
- request
"""
pass
def putUtter(self, request):
"""
Parameters:
- request
"""
pass
def getUtters(self, request):
"""
Parameters:
- request
"""
pass
def getUtter(self, request):
"""
Parameters:
- request
"""
pass
def delUtter(self, request):
"""
Parameters:
- request
"""
pass
def postSlot(self, request):
"""
Parameters:
- request
"""
pass
def putSlot(self, request):
"""
Parameters:
- request
"""
pass
def getSlots(self, request):
"""
Parameters:
- request
"""
pass
def getSlot(self, request):
"""
Parameters:
- request
"""
pass
def delSlot(self, request):
"""
Parameters:
- request
"""
pass
def train(self, request):
"""
对话管理
Parameters:
- request
"""
pass
def status(self, request):
"""
Parameters:
- request
"""
pass
def devver(self, request):
"""
Parameters:
- request
"""
pass
def prover(self, request):
"""
Parameters:
- request
"""
pass
def version(self, request):
"""
Parameters:
- request
"""
pass
def chat(self, request):
"""
Parameters:
- request
"""
pass
def online(self, request):
"""
Parameters:
- request
"""
pass
def offline(self, request):
"""
Parameters:
- request
"""
pass
def putSession(self, request):
"""
Parameters:
- request
"""
pass
def getSession(self, request):
"""
Parameters:
- request
"""
pass
class Client(Iface):
"""
API
"""
def __init__(self, iprot, oprot=None):
self._iprot = self._oprot = iprot
if oprot is not None:
self._oprot = oprot
self._seqid = 0
def postCustomDict(self, request):
"""
词典管理
Parameters:
- request
"""
self.send_postCustomDict(request)
return self.recv_postCustomDict()
def send_postCustomDict(self, request):
self._oprot.writeMessageBegin('postCustomDict', TMessageType.CALL, self._seqid)
args = postCustomDict_args()
args.request = request
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_postCustomDict(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = postCustomDict_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "postCustomDict failed: unknown result")
def putCustomDict(self, request):
"""
Parameters:
- request
"""
self.send_putCustomDict(request)
return self.recv_putCustomDict()
def send_putCustomDict(self, request):
self._oprot.writeMessageBegin('putCustomDict', TMessageType.CALL, self._seqid)
args = putCustomDict_args()
args.request = request
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_putCustomDict(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = putCustomDict_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "putCustomDict failed: unknown result")
def getCustomDicts(self, request):
"""
Parameters:
- request
"""
self.send_getCustomDicts(request)
return self.recv_getCustomDicts()
def send_getCustomDicts(self, request):
self._oprot.writeMessageBegin('getCustomDicts', TMessageType.CALL, self._seqid)
args = getCustomDicts_args()
args.request = request
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getCustomDicts(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getCustomDicts_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "getCustomDicts failed: unknown result")
def getCustomDict(self, request):
"""
Parameters:
- request
"""
self.send_getCustomDict(request)
return self.recv_getCustomDict()
def send_getCustomDict(self, request):
self._oprot.writeMessageBegin('getCustomDict', TMessageType.CALL, self._seqid)
args = getCustomDict_args()
args.request = request
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getCustomDict(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getCustomDict_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "getCustomDict failed: unknown result")
def delCustomDict(self, request):
"""
Parameters:
- request
"""
self.send_delCustomDict(request)
return self.recv_delCustomDict()
def send_delCustomDict(self, request):
self._oprot.writeMessageBegin('delCustomDict', TMessageType.CALL, self._seqid)
args = delCustomDict_args()
args.request = request
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_delCustomDict(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = delCustomDict_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "delCustomDict failed: unknown result")
def postSysDict(self, request):
"""
Parameters:
- request
"""
self.send_postSysDict(request)
return self.recv_postSysDict()
def send_postSysDict(self, request):
self._oprot.writeMessageBegin('postSysDict', TMessageType.CALL, self._seqid)
args = postSysDict_args()
args.request = request
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_postSysDict(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = postSysDict_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "postSysDict failed: unknown result")
def putSysDict(self, request):
"""
Parameters:
- request
"""
self.send_putSysDict(request)
return self.recv_putSysDict()
def send_putSysDict(self, request):
self._oprot.writeMessageBegin('putSysDict', TMessageType.CALL, self._seqid)
args = putSysDict_args()
args.request = request
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_putSysDict(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = putSysDict_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "putSysDict failed: unknown result")
def getSysDicts(self, request):
"""
Parameters:
- request
"""
self.send_getSysDicts(request)
return self.recv_getSysDicts()
def send_getSysDicts(self, request):
self._oprot.writeMessageBegin('getSysDicts', TMessageType.CALL, self._seqid)
args = getSysDicts_args()
args.request = request
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getSysDicts(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getSysDicts_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "getSysDicts failed: unknown result")
def getSysDict(self, request):
"""
Parameters:
- request
"""
self.send_getSysDict(request)
return self.recv_getSysDict()
def send_getSysDict(self, request):
self._oprot.writeMessageBegin('getSysDict', TMessageType.CALL, self._seqid)
args = getSysDict_args()
args.request = request
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getSysDict(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getSysDict_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "getSysDict failed: unknown result")
def refSysDict(self, request):
"""
Parameters:
- request
"""
self.send_refSysDict(request)
return self.recv_refSysDict()
def send_refSysDict(self, request):
self._oprot.writeMessageBegin('refSysDict', TMessageType.CALL, self._seqid)
args = refSysDict_args()
args.request = request
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_refSysDict(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = refSysDict_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "refSysDict failed: unknown result")
def unrefSysDict(self, request):
"""
Parameters:
- request
"""
self.send_unrefSysDict(request)
return self.recv_unrefSysDict()
def send_unrefSysDict(self, request):
self._oprot.writeMessageBegin('unrefSysDict', TMessageType.CALL, self._seqid)
args = unrefSysDict_args()
args.request = request
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_unrefSysDict(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = unrefSysDict_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "unrefSysDict failed: unknown result")
def myDicts(self, request):
"""
Parameters:
- request
"""
self.send_myDicts(request)
return self.recv_myDicts()
def send_myDicts(self, request):
self._oprot.writeMessageBegin('myDicts', TMessageType.CALL, self._seqid)
args = myDicts_args()
args.request = request
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_myDicts(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = myDicts_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "myDicts failed: unknown result")
def mySysdicts(self, request):
"""
Parameters:
- request
"""
self.send_mySysdicts(request)
return self.recv_mySysdicts()
def send_mySysdicts(self, request):
self._oprot.writeMessageBegin('mySysdicts', TMessageType.CALL, self._seqid)
args = mySysdicts_args()
args.request = request
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_mySysdicts(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = mySysdicts_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "mySysdicts failed: unknown result")
def putDictWord(self, request):
"""
Parameters:
- request
"""
self.send_putDictWord(request)
return self.recv_putDictWord()
def send_putDictWord(self, request):
self._oprot.writeMessageBegin('putDictWord', TMessageType.CALL, self._seqid)
args = putDictWord_args()
args.request = request
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_putDictWord(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = putDictWord_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "putDictWord failed: unknown result")
def getDictWords(self, request):
    """Invoke the remote ``getDictWords`` call and return its result.

    Parameters:
     - request
    """
    self.send_getDictWords(request)
    return self.recv_getDictWords()

def send_getDictWords(self, request):
    """Serialize and flush a ``getDictWords`` request on the output protocol."""
    oprot = self._oprot
    oprot.writeMessageBegin('getDictWords', TMessageType.CALL, self._seqid)
    call_args = getDictWords_args()
    call_args.request = request
    call_args.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()

def recv_getDictWords(self):
    """Read the ``getDictWords`` reply, raising any server-reported exception."""
    proto = self._iprot
    (fname, mtype, rseqid) = proto.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        exc = TApplicationException()
        exc.read(proto)
        proto.readMessageEnd()
        raise exc
    reply = getDictWords_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    raise TApplicationException(TApplicationException.MISSING_RESULT, "getDictWords failed: unknown result")
def delDictWord(self, request):
    """Invoke the remote ``delDictWord`` call and return its result.

    Parameters:
     - request
    """
    self.send_delDictWord(request)
    return self.recv_delDictWord()

def send_delDictWord(self, request):
    """Serialize and flush a ``delDictWord`` request on the output protocol."""
    oprot = self._oprot
    oprot.writeMessageBegin('delDictWord', TMessageType.CALL, self._seqid)
    call_args = delDictWord_args()
    call_args.request = request
    call_args.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()

def recv_delDictWord(self):
    """Read the ``delDictWord`` reply, raising any server-reported exception."""
    proto = self._iprot
    (fname, mtype, rseqid) = proto.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        exc = TApplicationException()
        exc.read(proto)
        proto.readMessageEnd()
        raise exc
    reply = delDictWord_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    raise TApplicationException(TApplicationException.MISSING_RESULT, "delDictWord failed: unknown result")
def hasDictWord(self, request):
    """Invoke the remote ``hasDictWord`` call and return its result.

    Parameters:
     - request
    """
    self.send_hasDictWord(request)
    return self.recv_hasDictWord()

def send_hasDictWord(self, request):
    """Serialize and flush a ``hasDictWord`` request on the output protocol."""
    oprot = self._oprot
    oprot.writeMessageBegin('hasDictWord', TMessageType.CALL, self._seqid)
    call_args = hasDictWord_args()
    call_args.request = request
    call_args.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()

def recv_hasDictWord(self):
    """Read the ``hasDictWord`` reply, raising any server-reported exception."""
    proto = self._iprot
    (fname, mtype, rseqid) = proto.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        exc = TApplicationException()
        exc.read(proto)
        proto.readMessageEnd()
        raise exc
    reply = hasDictWord_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    raise TApplicationException(TApplicationException.MISSING_RESULT, "hasDictWord failed: unknown result")
def postIntent(self, request):
    """Invoke the remote ``postIntent`` call (intent management) and return its result.

    Parameters:
     - request
    """
    self.send_postIntent(request)
    return self.recv_postIntent()

def send_postIntent(self, request):
    """Serialize and flush a ``postIntent`` request on the output protocol."""
    oprot = self._oprot
    oprot.writeMessageBegin('postIntent', TMessageType.CALL, self._seqid)
    call_args = postIntent_args()
    call_args.request = request
    call_args.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()

def recv_postIntent(self):
    """Read the ``postIntent`` reply, raising any server-reported exception."""
    proto = self._iprot
    (fname, mtype, rseqid) = proto.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        exc = TApplicationException()
        exc.read(proto)
        proto.readMessageEnd()
        raise exc
    reply = postIntent_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    raise TApplicationException(TApplicationException.MISSING_RESULT, "postIntent failed: unknown result")
def putIntent(self, request):
    """Invoke the remote ``putIntent`` call and return its result.

    Parameters:
     - request
    """
    self.send_putIntent(request)
    return self.recv_putIntent()

def send_putIntent(self, request):
    """Serialize and flush a ``putIntent`` request on the output protocol."""
    oprot = self._oprot
    oprot.writeMessageBegin('putIntent', TMessageType.CALL, self._seqid)
    call_args = putIntent_args()
    call_args.request = request
    call_args.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()

def recv_putIntent(self):
    """Read the ``putIntent`` reply, raising any server-reported exception."""
    proto = self._iprot
    (fname, mtype, rseqid) = proto.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        exc = TApplicationException()
        exc.read(proto)
        proto.readMessageEnd()
        raise exc
    reply = putIntent_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    raise TApplicationException(TApplicationException.MISSING_RESULT, "putIntent failed: unknown result")
def getIntents(self, request):
    """Invoke the remote ``getIntents`` call and return its result.

    Parameters:
     - request
    """
    self.send_getIntents(request)
    return self.recv_getIntents()

def send_getIntents(self, request):
    """Serialize and flush a ``getIntents`` request on the output protocol."""
    oprot = self._oprot
    oprot.writeMessageBegin('getIntents', TMessageType.CALL, self._seqid)
    call_args = getIntents_args()
    call_args.request = request
    call_args.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()

def recv_getIntents(self):
    """Read the ``getIntents`` reply, raising any server-reported exception."""
    proto = self._iprot
    (fname, mtype, rseqid) = proto.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        exc = TApplicationException()
        exc.read(proto)
        proto.readMessageEnd()
        raise exc
    reply = getIntents_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    raise TApplicationException(TApplicationException.MISSING_RESULT, "getIntents failed: unknown result")
def getIntent(self, request):
    """Invoke the remote ``getIntent`` call and return its result.

    Parameters:
     - request
    """
    self.send_getIntent(request)
    return self.recv_getIntent()

def send_getIntent(self, request):
    """Serialize and flush a ``getIntent`` request on the output protocol."""
    oprot = self._oprot
    oprot.writeMessageBegin('getIntent', TMessageType.CALL, self._seqid)
    call_args = getIntent_args()
    call_args.request = request
    call_args.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()

def recv_getIntent(self):
    """Read the ``getIntent`` reply, raising any server-reported exception."""
    proto = self._iprot
    (fname, mtype, rseqid) = proto.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        exc = TApplicationException()
        exc.read(proto)
        proto.readMessageEnd()
        raise exc
    reply = getIntent_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    raise TApplicationException(TApplicationException.MISSING_RESULT, "getIntent failed: unknown result")
def delIntent(self, request):
    """Invoke the remote ``delIntent`` call and return its result.

    Parameters:
     - request
    """
    self.send_delIntent(request)
    return self.recv_delIntent()

def send_delIntent(self, request):
    """Serialize and flush a ``delIntent`` request on the output protocol."""
    oprot = self._oprot
    oprot.writeMessageBegin('delIntent', TMessageType.CALL, self._seqid)
    call_args = delIntent_args()
    call_args.request = request
    call_args.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()

def recv_delIntent(self):
    """Read the ``delIntent`` reply, raising any server-reported exception."""
    proto = self._iprot
    (fname, mtype, rseqid) = proto.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        exc = TApplicationException()
        exc.read(proto)
        proto.readMessageEnd()
        raise exc
    reply = delIntent_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    raise TApplicationException(TApplicationException.MISSING_RESULT, "delIntent failed: unknown result")
def postUtter(self, request):
    """Invoke the remote ``postUtter`` call and return its result.

    Parameters:
     - request
    """
    self.send_postUtter(request)
    return self.recv_postUtter()

def send_postUtter(self, request):
    """Serialize and flush a ``postUtter`` request on the output protocol."""
    oprot = self._oprot
    oprot.writeMessageBegin('postUtter', TMessageType.CALL, self._seqid)
    call_args = postUtter_args()
    call_args.request = request
    call_args.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()

def recv_postUtter(self):
    """Read the ``postUtter`` reply, raising any server-reported exception."""
    proto = self._iprot
    (fname, mtype, rseqid) = proto.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        exc = TApplicationException()
        exc.read(proto)
        proto.readMessageEnd()
        raise exc
    reply = postUtter_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    raise TApplicationException(TApplicationException.MISSING_RESULT, "postUtter failed: unknown result")
def putUtter(self, request):
    """Invoke the remote ``putUtter`` call and return its result.

    Parameters:
     - request
    """
    self.send_putUtter(request)
    return self.recv_putUtter()

def send_putUtter(self, request):
    """Serialize and flush a ``putUtter`` request on the output protocol."""
    oprot = self._oprot
    oprot.writeMessageBegin('putUtter', TMessageType.CALL, self._seqid)
    call_args = putUtter_args()
    call_args.request = request
    call_args.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()

def recv_putUtter(self):
    """Read the ``putUtter`` reply, raising any server-reported exception."""
    proto = self._iprot
    (fname, mtype, rseqid) = proto.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        exc = TApplicationException()
        exc.read(proto)
        proto.readMessageEnd()
        raise exc
    reply = putUtter_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    raise TApplicationException(TApplicationException.MISSING_RESULT, "putUtter failed: unknown result")
def getUtters(self, request):
    """Invoke the remote ``getUtters`` call and return its result.

    Parameters:
     - request
    """
    self.send_getUtters(request)
    return self.recv_getUtters()

def send_getUtters(self, request):
    """Serialize and flush a ``getUtters`` request on the output protocol."""
    oprot = self._oprot
    oprot.writeMessageBegin('getUtters', TMessageType.CALL, self._seqid)
    call_args = getUtters_args()
    call_args.request = request
    call_args.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()

def recv_getUtters(self):
    """Read the ``getUtters`` reply, raising any server-reported exception."""
    proto = self._iprot
    (fname, mtype, rseqid) = proto.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        exc = TApplicationException()
        exc.read(proto)
        proto.readMessageEnd()
        raise exc
    reply = getUtters_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    raise TApplicationException(TApplicationException.MISSING_RESULT, "getUtters failed: unknown result")
def getUtter(self, request):
    """Invoke the remote ``getUtter`` call and return its result.

    Parameters:
     - request
    """
    self.send_getUtter(request)
    return self.recv_getUtter()

def send_getUtter(self, request):
    """Serialize and flush a ``getUtter`` request on the output protocol."""
    oprot = self._oprot
    oprot.writeMessageBegin('getUtter', TMessageType.CALL, self._seqid)
    call_args = getUtter_args()
    call_args.request = request
    call_args.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()

def recv_getUtter(self):
    """Read the ``getUtter`` reply, raising any server-reported exception."""
    proto = self._iprot
    (fname, mtype, rseqid) = proto.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        exc = TApplicationException()
        exc.read(proto)
        proto.readMessageEnd()
        raise exc
    reply = getUtter_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    raise TApplicationException(TApplicationException.MISSING_RESULT, "getUtter failed: unknown result")
def delUtter(self, request):
    """Invoke the remote ``delUtter`` call and return its result.

    Parameters:
     - request
    """
    self.send_delUtter(request)
    return self.recv_delUtter()

def send_delUtter(self, request):
    """Serialize and flush a ``delUtter`` request on the output protocol."""
    oprot = self._oprot
    oprot.writeMessageBegin('delUtter', TMessageType.CALL, self._seqid)
    call_args = delUtter_args()
    call_args.request = request
    call_args.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()

def recv_delUtter(self):
    """Read the ``delUtter`` reply, raising any server-reported exception."""
    proto = self._iprot
    (fname, mtype, rseqid) = proto.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        exc = TApplicationException()
        exc.read(proto)
        proto.readMessageEnd()
        raise exc
    reply = delUtter_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    raise TApplicationException(TApplicationException.MISSING_RESULT, "delUtter failed: unknown result")
def postSlot(self, request):
    """Invoke the remote ``postSlot`` call and return its result.

    Parameters:
     - request
    """
    self.send_postSlot(request)
    return self.recv_postSlot()

def send_postSlot(self, request):
    """Serialize and flush a ``postSlot`` request on the output protocol."""
    oprot = self._oprot
    oprot.writeMessageBegin('postSlot', TMessageType.CALL, self._seqid)
    call_args = postSlot_args()
    call_args.request = request
    call_args.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()

def recv_postSlot(self):
    """Read the ``postSlot`` reply, raising any server-reported exception."""
    proto = self._iprot
    (fname, mtype, rseqid) = proto.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        exc = TApplicationException()
        exc.read(proto)
        proto.readMessageEnd()
        raise exc
    reply = postSlot_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    raise TApplicationException(TApplicationException.MISSING_RESULT, "postSlot failed: unknown result")
def putSlot(self, request):
    """Invoke the remote ``putSlot`` call and return its result.

    Parameters:
     - request
    """
    self.send_putSlot(request)
    return self.recv_putSlot()

def send_putSlot(self, request):
    """Serialize and flush a ``putSlot`` request on the output protocol."""
    oprot = self._oprot
    oprot.writeMessageBegin('putSlot', TMessageType.CALL, self._seqid)
    call_args = putSlot_args()
    call_args.request = request
    call_args.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()

def recv_putSlot(self):
    """Read the ``putSlot`` reply, raising any server-reported exception."""
    proto = self._iprot
    (fname, mtype, rseqid) = proto.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        exc = TApplicationException()
        exc.read(proto)
        proto.readMessageEnd()
        raise exc
    reply = putSlot_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    raise TApplicationException(TApplicationException.MISSING_RESULT, "putSlot failed: unknown result")
def getSlots(self, request):
    """Invoke the remote ``getSlots`` call and return its result.

    Parameters:
     - request
    """
    self.send_getSlots(request)
    return self.recv_getSlots()

def send_getSlots(self, request):
    """Serialize and flush a ``getSlots`` request on the output protocol."""
    oprot = self._oprot
    oprot.writeMessageBegin('getSlots', TMessageType.CALL, self._seqid)
    call_args = getSlots_args()
    call_args.request = request
    call_args.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()

def recv_getSlots(self):
    """Read the ``getSlots`` reply, raising any server-reported exception."""
    proto = self._iprot
    (fname, mtype, rseqid) = proto.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        exc = TApplicationException()
        exc.read(proto)
        proto.readMessageEnd()
        raise exc
    reply = getSlots_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    raise TApplicationException(TApplicationException.MISSING_RESULT, "getSlots failed: unknown result")
def getSlot(self, request):
    """Invoke the remote ``getSlot`` call and return its result.

    Parameters:
     - request
    """
    self.send_getSlot(request)
    return self.recv_getSlot()

def send_getSlot(self, request):
    """Serialize and flush a ``getSlot`` request on the output protocol."""
    oprot = self._oprot
    oprot.writeMessageBegin('getSlot', TMessageType.CALL, self._seqid)
    call_args = getSlot_args()
    call_args.request = request
    call_args.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()

def recv_getSlot(self):
    """Read the ``getSlot`` reply, raising any server-reported exception."""
    proto = self._iprot
    (fname, mtype, rseqid) = proto.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        exc = TApplicationException()
        exc.read(proto)
        proto.readMessageEnd()
        raise exc
    reply = getSlot_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    raise TApplicationException(TApplicationException.MISSING_RESULT, "getSlot failed: unknown result")
def delSlot(self, request):
    """Invoke the remote ``delSlot`` call and return its result.

    Parameters:
     - request
    """
    self.send_delSlot(request)
    return self.recv_delSlot()

def send_delSlot(self, request):
    """Serialize and flush a ``delSlot`` request on the output protocol."""
    oprot = self._oprot
    oprot.writeMessageBegin('delSlot', TMessageType.CALL, self._seqid)
    call_args = delSlot_args()
    call_args.request = request
    call_args.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()

def recv_delSlot(self):
    """Read the ``delSlot`` reply, raising any server-reported exception."""
    proto = self._iprot
    (fname, mtype, rseqid) = proto.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        exc = TApplicationException()
        exc.read(proto)
        proto.readMessageEnd()
        raise exc
    reply = delSlot_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    raise TApplicationException(TApplicationException.MISSING_RESULT, "delSlot failed: unknown result")
def train(self, request):
    """Invoke the remote ``train`` call (dialogue management) and return its result.

    Parameters:
     - request
    """
    self.send_train(request)
    return self.recv_train()

def send_train(self, request):
    """Serialize and flush a ``train`` request on the output protocol."""
    oprot = self._oprot
    oprot.writeMessageBegin('train', TMessageType.CALL, self._seqid)
    call_args = train_args()
    call_args.request = request
    call_args.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()

def recv_train(self):
    """Read the ``train`` reply, raising any server-reported exception."""
    proto = self._iprot
    (fname, mtype, rseqid) = proto.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        exc = TApplicationException()
        exc.read(proto)
        proto.readMessageEnd()
        raise exc
    reply = train_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    raise TApplicationException(TApplicationException.MISSING_RESULT, "train failed: unknown result")
def status(self, request):
    """Invoke the remote ``status`` call and return its result.

    Parameters:
     - request
    """
    self.send_status(request)
    return self.recv_status()

def send_status(self, request):
    """Serialize and flush a ``status`` request on the output protocol."""
    oprot = self._oprot
    oprot.writeMessageBegin('status', TMessageType.CALL, self._seqid)
    call_args = status_args()
    call_args.request = request
    call_args.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()

def recv_status(self):
    """Read the ``status`` reply, raising any server-reported exception."""
    proto = self._iprot
    (fname, mtype, rseqid) = proto.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        exc = TApplicationException()
        exc.read(proto)
        proto.readMessageEnd()
        raise exc
    reply = status_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    raise TApplicationException(TApplicationException.MISSING_RESULT, "status failed: unknown result")
def devver(self, request):
    """Invoke the remote ``devver`` call and return its result.

    Parameters:
     - request
    """
    self.send_devver(request)
    return self.recv_devver()

def send_devver(self, request):
    """Serialize and flush a ``devver`` request on the output protocol."""
    oprot = self._oprot
    oprot.writeMessageBegin('devver', TMessageType.CALL, self._seqid)
    call_args = devver_args()
    call_args.request = request
    call_args.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()

def recv_devver(self):
    """Read the ``devver`` reply, raising any server-reported exception."""
    proto = self._iprot
    (fname, mtype, rseqid) = proto.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        exc = TApplicationException()
        exc.read(proto)
        proto.readMessageEnd()
        raise exc
    reply = devver_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    raise TApplicationException(TApplicationException.MISSING_RESULT, "devver failed: unknown result")
def prover(self, request):
    """Invoke the remote ``prover`` call and return its result.

    Parameters:
     - request
    """
    self.send_prover(request)
    return self.recv_prover()

def send_prover(self, request):
    """Serialize and flush a ``prover`` request on the output protocol."""
    oprot = self._oprot
    oprot.writeMessageBegin('prover', TMessageType.CALL, self._seqid)
    call_args = prover_args()
    call_args.request = request
    call_args.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()

def recv_prover(self):
    """Read the ``prover`` reply, raising any server-reported exception."""
    proto = self._iprot
    (fname, mtype, rseqid) = proto.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        exc = TApplicationException()
        exc.read(proto)
        proto.readMessageEnd()
        raise exc
    reply = prover_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    raise TApplicationException(TApplicationException.MISSING_RESULT, "prover failed: unknown result")
def version(self, request):
    """Invoke the remote ``version`` call and return its result.

    Parameters:
     - request
    """
    self.send_version(request)
    return self.recv_version()

def send_version(self, request):
    """Serialize and flush a ``version`` request on the output protocol."""
    oprot = self._oprot
    oprot.writeMessageBegin('version', TMessageType.CALL, self._seqid)
    call_args = version_args()
    call_args.request = request
    call_args.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()

def recv_version(self):
    """Read the ``version`` reply, raising any server-reported exception."""
    proto = self._iprot
    (fname, mtype, rseqid) = proto.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        exc = TApplicationException()
        exc.read(proto)
        proto.readMessageEnd()
        raise exc
    reply = version_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    raise TApplicationException(TApplicationException.MISSING_RESULT, "version failed: unknown result")
def chat(self, request):
    """Invoke the remote ``chat`` call and return its result.

    Parameters:
     - request
    """
    self.send_chat(request)
    return self.recv_chat()

def send_chat(self, request):
    """Serialize and flush a ``chat`` request on the output protocol."""
    oprot = self._oprot
    oprot.writeMessageBegin('chat', TMessageType.CALL, self._seqid)
    call_args = chat_args()
    call_args.request = request
    call_args.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()

def recv_chat(self):
    """Read the ``chat`` reply, raising any server-reported exception."""
    proto = self._iprot
    (fname, mtype, rseqid) = proto.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        exc = TApplicationException()
        exc.read(proto)
        proto.readMessageEnd()
        raise exc
    reply = chat_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    raise TApplicationException(TApplicationException.MISSING_RESULT, "chat failed: unknown result")
def online(self, request):
    """Invoke the remote ``online`` call and return its result.

    Parameters:
     - request
    """
    self.send_online(request)
    return self.recv_online()

def send_online(self, request):
    """Serialize and flush an ``online`` request on the output protocol."""
    oprot = self._oprot
    oprot.writeMessageBegin('online', TMessageType.CALL, self._seqid)
    call_args = online_args()
    call_args.request = request
    call_args.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()

def recv_online(self):
    """Read the ``online`` reply, raising any server-reported exception."""
    proto = self._iprot
    (fname, mtype, rseqid) = proto.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        exc = TApplicationException()
        exc.read(proto)
        proto.readMessageEnd()
        raise exc
    reply = online_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    raise TApplicationException(TApplicationException.MISSING_RESULT, "online failed: unknown result")
def offline(self, request):
    """Invoke the remote ``offline`` call and return its result.

    Parameters:
     - request
    """
    self.send_offline(request)
    return self.recv_offline()

def send_offline(self, request):
    """Serialize and flush an ``offline`` request on the output protocol."""
    oprot = self._oprot
    oprot.writeMessageBegin('offline', TMessageType.CALL, self._seqid)
    call_args = offline_args()
    call_args.request = request
    call_args.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()

def recv_offline(self):
    """Read the ``offline`` reply, raising any server-reported exception."""
    proto = self._iprot
    (fname, mtype, rseqid) = proto.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        exc = TApplicationException()
        exc.read(proto)
        proto.readMessageEnd()
        raise exc
    reply = offline_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    raise TApplicationException(TApplicationException.MISSING_RESULT, "offline failed: unknown result")
def putSession(self, request):
    """Invoke the remote ``putSession`` call and return its result.

    Parameters:
     - request
    """
    self.send_putSession(request)
    return self.recv_putSession()

def send_putSession(self, request):
    """Serialize and flush a ``putSession`` request on the output protocol."""
    oprot = self._oprot
    oprot.writeMessageBegin('putSession', TMessageType.CALL, self._seqid)
    call_args = putSession_args()
    call_args.request = request
    call_args.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()

def recv_putSession(self):
    """Read the ``putSession`` reply, raising any server-reported exception."""
    proto = self._iprot
    (fname, mtype, rseqid) = proto.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        exc = TApplicationException()
        exc.read(proto)
        proto.readMessageEnd()
        raise exc
    reply = putSession_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    raise TApplicationException(TApplicationException.MISSING_RESULT, "putSession failed: unknown result")
def getSession(self, request):
    """Invoke the remote ``getSession`` call and return its result.

    Parameters:
     - request
    """
    self.send_getSession(request)
    return self.recv_getSession()

def send_getSession(self, request):
    """Serialize and flush a ``getSession`` request on the output protocol."""
    oprot = self._oprot
    oprot.writeMessageBegin('getSession', TMessageType.CALL, self._seqid)
    call_args = getSession_args()
    call_args.request = request
    call_args.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()

def recv_getSession(self):
    """Read the ``getSession`` reply, raising any server-reported exception."""
    proto = self._iprot
    (fname, mtype, rseqid) = proto.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        exc = TApplicationException()
        exc.read(proto)
        proto.readMessageEnd()
        raise exc
    reply = getSession_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    raise TApplicationException(TApplicationException.MISSING_RESULT, "getSession failed: unknown result")
class Processor(Iface, TProcessor):
def __init__(self, handler):
    """Store the user-supplied handler and build the RPC dispatch table.

    Maps each service method name to its ``process_*`` function; insertion
    order and mapped values are identical to the generated one-per-line form.
    """
    self._handler = handler
    method_names = (
        "postCustomDict", "putCustomDict", "getCustomDicts", "getCustomDict",
        "delCustomDict", "postSysDict", "putSysDict", "getSysDicts",
        "getSysDict", "refSysDict", "unrefSysDict", "myDicts", "mySysdicts",
        "putDictWord", "getDictWords", "delDictWord", "hasDictWord",
        "postIntent", "putIntent", "getIntents", "getIntent", "delIntent",
        "postUtter", "putUtter", "getUtters", "getUtter", "delUtter",
        "postSlot", "putSlot", "getSlots", "getSlot", "delSlot",
        "train", "status", "devver", "prover", "version", "chat",
        "online", "offline", "putSession", "getSession",
    )
    self._processMap = {
        name: getattr(Processor, "process_" + name) for name in method_names
    }
def process(self, iprot, oprot):
    """Read one incoming message and dispatch it to the matching handler.

    Unknown method names are drained from the wire and answered with an
    UNKNOWN_METHOD exception (returning None, as the original did); known
    methods are dispatched and True is returned.
    """
    (name, mtype, seqid) = iprot.readMessageBegin()
    dispatch = self._processMap.get(name)
    if dispatch is None:
        # Drain the unknown call's payload so the transport stays in sync,
        # then report the failure back to the caller.
        iprot.skip(TType.STRUCT)
        iprot.readMessageEnd()
        x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
        oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
        x.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
        return
    dispatch(self, seqid, iprot, oprot)
    return True
def process_postCustomDict(self, seqid, iprot, oprot):
    """Decode a ``postCustomDict`` call, run the handler, and send the reply."""
    call_args = postCustomDict_args()
    call_args.read(iprot)
    iprot.readMessageEnd()
    reply = postCustomDict_result()
    try:
        reply.success = self._handler.postCustomDict(call_args.request)
        msg_type = TMessageType.REPLY
    except TTransport.TTransportException:
        # Transport failures must propagate so the server can drop the connection.
        raise
    except TApplicationException as ex:
        logging.exception('TApplication exception in handler')
        msg_type = TMessageType.EXCEPTION
        reply = ex
    except Exception:
        logging.exception('Unexpected exception in handler')
        msg_type = TMessageType.EXCEPTION
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("postCustomDict", msg_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_putCustomDict(self, seqid, iprot, oprot):
    """Decode a ``putCustomDict`` call, run the handler, and send the reply."""
    call_args = putCustomDict_args()
    call_args.read(iprot)
    iprot.readMessageEnd()
    reply = putCustomDict_result()
    try:
        reply.success = self._handler.putCustomDict(call_args.request)
        msg_type = TMessageType.REPLY
    except TTransport.TTransportException:
        # Transport failures must propagate so the server can drop the connection.
        raise
    except TApplicationException as ex:
        logging.exception('TApplication exception in handler')
        msg_type = TMessageType.EXCEPTION
        reply = ex
    except Exception:
        logging.exception('Unexpected exception in handler')
        msg_type = TMessageType.EXCEPTION
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("putCustomDict", msg_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_getCustomDicts(self, seqid, iprot, oprot):
    """Decode a ``getCustomDicts`` call, run the handler, and send the reply."""
    call_args = getCustomDicts_args()
    call_args.read(iprot)
    iprot.readMessageEnd()
    reply = getCustomDicts_result()
    try:
        reply.success = self._handler.getCustomDicts(call_args.request)
        msg_type = TMessageType.REPLY
    except TTransport.TTransportException:
        # Transport failures must propagate so the server can drop the connection.
        raise
    except TApplicationException as ex:
        logging.exception('TApplication exception in handler')
        msg_type = TMessageType.EXCEPTION
        reply = ex
    except Exception:
        logging.exception('Unexpected exception in handler')
        msg_type = TMessageType.EXCEPTION
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("getCustomDicts", msg_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_getCustomDict(self, seqid, iprot, oprot):
    """Decode a ``getCustomDict`` call, run the handler, and send the reply."""
    call_args = getCustomDict_args()
    call_args.read(iprot)
    iprot.readMessageEnd()
    reply = getCustomDict_result()
    try:
        reply.success = self._handler.getCustomDict(call_args.request)
        msg_type = TMessageType.REPLY
    except TTransport.TTransportException:
        # Transport failures must propagate so the server can drop the connection.
        raise
    except TApplicationException as ex:
        logging.exception('TApplication exception in handler')
        msg_type = TMessageType.EXCEPTION
        reply = ex
    except Exception:
        logging.exception('Unexpected exception in handler')
        msg_type = TMessageType.EXCEPTION
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("getCustomDict", msg_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_delCustomDict(self, seqid, iprot, oprot):
    """Decode a ``delCustomDict`` call, run the handler, and send the reply."""
    call_args = delCustomDict_args()
    call_args.read(iprot)
    iprot.readMessageEnd()
    reply = delCustomDict_result()
    try:
        reply.success = self._handler.delCustomDict(call_args.request)
        msg_type = TMessageType.REPLY
    except TTransport.TTransportException:
        # Transport failures must propagate so the server can drop the connection.
        raise
    except TApplicationException as ex:
        logging.exception('TApplication exception in handler')
        msg_type = TMessageType.EXCEPTION
        reply = ex
    except Exception:
        logging.exception('Unexpected exception in handler')
        msg_type = TMessageType.EXCEPTION
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("delCustomDict", msg_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_postSysDict(self, seqid, iprot, oprot):
    """Decode a ``postSysDict`` call, run the handler, and send the reply."""
    call_args = postSysDict_args()
    call_args.read(iprot)
    iprot.readMessageEnd()
    reply = postSysDict_result()
    try:
        reply.success = self._handler.postSysDict(call_args.request)
        msg_type = TMessageType.REPLY
    except TTransport.TTransportException:
        # Transport failures must propagate so the server can drop the connection.
        raise
    except TApplicationException as ex:
        logging.exception('TApplication exception in handler')
        msg_type = TMessageType.EXCEPTION
        reply = ex
    except Exception:
        logging.exception('Unexpected exception in handler')
        msg_type = TMessageType.EXCEPTION
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("postSysDict", msg_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_putSysDict(self, seqid, iprot, oprot):
    """Decode a ``putSysDict`` call, run the handler, and send the reply."""
    call_args = putSysDict_args()
    call_args.read(iprot)
    iprot.readMessageEnd()
    reply = putSysDict_result()
    try:
        reply.success = self._handler.putSysDict(call_args.request)
        msg_type = TMessageType.REPLY
    except TTransport.TTransportException:
        # Transport failures must propagate so the server can drop the connection.
        raise
    except TApplicationException as ex:
        logging.exception('TApplication exception in handler')
        msg_type = TMessageType.EXCEPTION
        reply = ex
    except Exception:
        logging.exception('Unexpected exception in handler')
        msg_type = TMessageType.EXCEPTION
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("putSysDict", msg_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_getSysDicts(self, seqid, iprot, oprot):
    """Decode a ``getSysDicts`` call, run the handler, and send the reply."""
    call_args = getSysDicts_args()
    call_args.read(iprot)
    iprot.readMessageEnd()
    reply = getSysDicts_result()
    try:
        reply.success = self._handler.getSysDicts(call_args.request)
        msg_type = TMessageType.REPLY
    except TTransport.TTransportException:
        # Transport failures must propagate so the server can drop the connection.
        raise
    except TApplicationException as ex:
        logging.exception('TApplication exception in handler')
        msg_type = TMessageType.EXCEPTION
        reply = ex
    except Exception:
        logging.exception('Unexpected exception in handler')
        msg_type = TMessageType.EXCEPTION
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("getSysDicts", msg_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_getSysDict(self, seqid, iprot, oprot):
    """Decode a ``getSysDict`` call, run the handler, and send the reply."""
    call_args = getSysDict_args()
    call_args.read(iprot)
    iprot.readMessageEnd()
    reply = getSysDict_result()
    try:
        reply.success = self._handler.getSysDict(call_args.request)
        msg_type = TMessageType.REPLY
    except TTransport.TTransportException:
        # Transport failures must propagate so the server can drop the connection.
        raise
    except TApplicationException as ex:
        logging.exception('TApplication exception in handler')
        msg_type = TMessageType.EXCEPTION
        reply = ex
    except Exception:
        logging.exception('Unexpected exception in handler')
        msg_type = TMessageType.EXCEPTION
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("getSysDict", msg_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_refSysDict(self, seqid, iprot, oprot):
    """Decode a ``refSysDict`` call, run the handler, and send the reply."""
    call_args = refSysDict_args()
    call_args.read(iprot)
    iprot.readMessageEnd()
    reply = refSysDict_result()
    try:
        reply.success = self._handler.refSysDict(call_args.request)
        msg_type = TMessageType.REPLY
    except TTransport.TTransportException:
        # Transport failures must propagate so the server can drop the connection.
        raise
    except TApplicationException as ex:
        logging.exception('TApplication exception in handler')
        msg_type = TMessageType.EXCEPTION
        reply = ex
    except Exception:
        logging.exception('Unexpected exception in handler')
        msg_type = TMessageType.EXCEPTION
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("refSysDict", msg_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_unrefSysDict(self, seqid, iprot, oprot):
    """Handle one unrefSysDict RPC: decode args, invoke the handler, write the reply."""
    parsed = unrefSysDict_args()
    parsed.read(iprot)
    iprot.readMessageEnd()
    reply = unrefSysDict_result()
    try:
        reply.success = self._handler.unrefSysDict(parsed.request)
        reply_type = TMessageType.REPLY
    except TTransport.TTransportException:
        raise  # transport is unusable; nothing can be written back
    except TApplicationException as ex:
        logging.exception('TApplication exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = ex
    except Exception:
        logging.exception('Unexpected exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("unrefSysDict", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_myDicts(self, seqid, iprot, oprot):
    """Handle one myDicts RPC: decode args, invoke the handler, write the reply."""
    parsed = myDicts_args()
    parsed.read(iprot)
    iprot.readMessageEnd()
    reply = myDicts_result()
    try:
        reply.success = self._handler.myDicts(parsed.request)
        reply_type = TMessageType.REPLY
    except TTransport.TTransportException:
        raise  # transport is unusable; nothing can be written back
    except TApplicationException as ex:
        logging.exception('TApplication exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = ex
    except Exception:
        logging.exception('Unexpected exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("myDicts", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_mySysdicts(self, seqid, iprot, oprot):
    """Handle one mySysdicts RPC: decode args, invoke the handler, write the reply."""
    parsed = mySysdicts_args()
    parsed.read(iprot)
    iprot.readMessageEnd()
    reply = mySysdicts_result()
    try:
        reply.success = self._handler.mySysdicts(parsed.request)
        reply_type = TMessageType.REPLY
    except TTransport.TTransportException:
        raise  # transport is unusable; nothing can be written back
    except TApplicationException as ex:
        logging.exception('TApplication exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = ex
    except Exception:
        logging.exception('Unexpected exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("mySysdicts", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_putDictWord(self, seqid, iprot, oprot):
    """Handle one putDictWord RPC: decode args, invoke the handler, write the reply."""
    parsed = putDictWord_args()
    parsed.read(iprot)
    iprot.readMessageEnd()
    reply = putDictWord_result()
    try:
        reply.success = self._handler.putDictWord(parsed.request)
        reply_type = TMessageType.REPLY
    except TTransport.TTransportException:
        raise  # transport is unusable; nothing can be written back
    except TApplicationException as ex:
        logging.exception('TApplication exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = ex
    except Exception:
        logging.exception('Unexpected exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("putDictWord", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_getDictWords(self, seqid, iprot, oprot):
    """Handle one getDictWords RPC: decode args, invoke the handler, write the reply."""
    parsed = getDictWords_args()
    parsed.read(iprot)
    iprot.readMessageEnd()
    reply = getDictWords_result()
    try:
        reply.success = self._handler.getDictWords(parsed.request)
        reply_type = TMessageType.REPLY
    except TTransport.TTransportException:
        raise  # transport is unusable; nothing can be written back
    except TApplicationException as ex:
        logging.exception('TApplication exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = ex
    except Exception:
        logging.exception('Unexpected exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("getDictWords", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_delDictWord(self, seqid, iprot, oprot):
    """Handle one delDictWord RPC: decode args, invoke the handler, write the reply."""
    parsed = delDictWord_args()
    parsed.read(iprot)
    iprot.readMessageEnd()
    reply = delDictWord_result()
    try:
        reply.success = self._handler.delDictWord(parsed.request)
        reply_type = TMessageType.REPLY
    except TTransport.TTransportException:
        raise  # transport is unusable; nothing can be written back
    except TApplicationException as ex:
        logging.exception('TApplication exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = ex
    except Exception:
        logging.exception('Unexpected exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("delDictWord", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_hasDictWord(self, seqid, iprot, oprot):
    """Handle one hasDictWord RPC: decode args, invoke the handler, write the reply."""
    parsed = hasDictWord_args()
    parsed.read(iprot)
    iprot.readMessageEnd()
    reply = hasDictWord_result()
    try:
        reply.success = self._handler.hasDictWord(parsed.request)
        reply_type = TMessageType.REPLY
    except TTransport.TTransportException:
        raise  # transport is unusable; nothing can be written back
    except TApplicationException as ex:
        logging.exception('TApplication exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = ex
    except Exception:
        logging.exception('Unexpected exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("hasDictWord", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_postIntent(self, seqid, iprot, oprot):
    """Handle one postIntent RPC: decode args, invoke the handler, write the reply."""
    parsed = postIntent_args()
    parsed.read(iprot)
    iprot.readMessageEnd()
    reply = postIntent_result()
    try:
        reply.success = self._handler.postIntent(parsed.request)
        reply_type = TMessageType.REPLY
    except TTransport.TTransportException:
        raise  # transport is unusable; nothing can be written back
    except TApplicationException as ex:
        logging.exception('TApplication exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = ex
    except Exception:
        logging.exception('Unexpected exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("postIntent", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_putIntent(self, seqid, iprot, oprot):
    """Handle one putIntent RPC: decode args, invoke the handler, write the reply."""
    parsed = putIntent_args()
    parsed.read(iprot)
    iprot.readMessageEnd()
    reply = putIntent_result()
    try:
        reply.success = self._handler.putIntent(parsed.request)
        reply_type = TMessageType.REPLY
    except TTransport.TTransportException:
        raise  # transport is unusable; nothing can be written back
    except TApplicationException as ex:
        logging.exception('TApplication exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = ex
    except Exception:
        logging.exception('Unexpected exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("putIntent", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_getIntents(self, seqid, iprot, oprot):
    """Handle one getIntents RPC: decode args, invoke the handler, write the reply."""
    parsed = getIntents_args()
    parsed.read(iprot)
    iprot.readMessageEnd()
    reply = getIntents_result()
    try:
        reply.success = self._handler.getIntents(parsed.request)
        reply_type = TMessageType.REPLY
    except TTransport.TTransportException:
        raise  # transport is unusable; nothing can be written back
    except TApplicationException as ex:
        logging.exception('TApplication exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = ex
    except Exception:
        logging.exception('Unexpected exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("getIntents", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_getIntent(self, seqid, iprot, oprot):
    """Handle one getIntent RPC: decode args, invoke the handler, write the reply."""
    parsed = getIntent_args()
    parsed.read(iprot)
    iprot.readMessageEnd()
    reply = getIntent_result()
    try:
        reply.success = self._handler.getIntent(parsed.request)
        reply_type = TMessageType.REPLY
    except TTransport.TTransportException:
        raise  # transport is unusable; nothing can be written back
    except TApplicationException as ex:
        logging.exception('TApplication exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = ex
    except Exception:
        logging.exception('Unexpected exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("getIntent", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_delIntent(self, seqid, iprot, oprot):
    """Handle one delIntent RPC: decode args, invoke the handler, write the reply."""
    parsed = delIntent_args()
    parsed.read(iprot)
    iprot.readMessageEnd()
    reply = delIntent_result()
    try:
        reply.success = self._handler.delIntent(parsed.request)
        reply_type = TMessageType.REPLY
    except TTransport.TTransportException:
        raise  # transport is unusable; nothing can be written back
    except TApplicationException as ex:
        logging.exception('TApplication exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = ex
    except Exception:
        logging.exception('Unexpected exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("delIntent", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_postUtter(self, seqid, iprot, oprot):
    """Handle one postUtter RPC: decode args, invoke the handler, write the reply."""
    parsed = postUtter_args()
    parsed.read(iprot)
    iprot.readMessageEnd()
    reply = postUtter_result()
    try:
        reply.success = self._handler.postUtter(parsed.request)
        reply_type = TMessageType.REPLY
    except TTransport.TTransportException:
        raise  # transport is unusable; nothing can be written back
    except TApplicationException as ex:
        logging.exception('TApplication exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = ex
    except Exception:
        logging.exception('Unexpected exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("postUtter", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_putUtter(self, seqid, iprot, oprot):
    """Handle one putUtter RPC: decode args, invoke the handler, write the reply."""
    parsed = putUtter_args()
    parsed.read(iprot)
    iprot.readMessageEnd()
    reply = putUtter_result()
    try:
        reply.success = self._handler.putUtter(parsed.request)
        reply_type = TMessageType.REPLY
    except TTransport.TTransportException:
        raise  # transport is unusable; nothing can be written back
    except TApplicationException as ex:
        logging.exception('TApplication exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = ex
    except Exception:
        logging.exception('Unexpected exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("putUtter", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_getUtters(self, seqid, iprot, oprot):
    """Handle one getUtters RPC: decode args, invoke the handler, write the reply."""
    parsed = getUtters_args()
    parsed.read(iprot)
    iprot.readMessageEnd()
    reply = getUtters_result()
    try:
        reply.success = self._handler.getUtters(parsed.request)
        reply_type = TMessageType.REPLY
    except TTransport.TTransportException:
        raise  # transport is unusable; nothing can be written back
    except TApplicationException as ex:
        logging.exception('TApplication exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = ex
    except Exception:
        logging.exception('Unexpected exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("getUtters", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_getUtter(self, seqid, iprot, oprot):
    """Handle one getUtter RPC: decode args, invoke the handler, write the reply."""
    parsed = getUtter_args()
    parsed.read(iprot)
    iprot.readMessageEnd()
    reply = getUtter_result()
    try:
        reply.success = self._handler.getUtter(parsed.request)
        reply_type = TMessageType.REPLY
    except TTransport.TTransportException:
        raise  # transport is unusable; nothing can be written back
    except TApplicationException as ex:
        logging.exception('TApplication exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = ex
    except Exception:
        logging.exception('Unexpected exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("getUtter", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_delUtter(self, seqid, iprot, oprot):
    """Handle one delUtter RPC: decode args, invoke the handler, write the reply."""
    parsed = delUtter_args()
    parsed.read(iprot)
    iprot.readMessageEnd()
    reply = delUtter_result()
    try:
        reply.success = self._handler.delUtter(parsed.request)
        reply_type = TMessageType.REPLY
    except TTransport.TTransportException:
        raise  # transport is unusable; nothing can be written back
    except TApplicationException as ex:
        logging.exception('TApplication exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = ex
    except Exception:
        logging.exception('Unexpected exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("delUtter", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_postSlot(self, seqid, iprot, oprot):
    """Handle one postSlot RPC: decode args, invoke the handler, write the reply."""
    parsed = postSlot_args()
    parsed.read(iprot)
    iprot.readMessageEnd()
    reply = postSlot_result()
    try:
        reply.success = self._handler.postSlot(parsed.request)
        reply_type = TMessageType.REPLY
    except TTransport.TTransportException:
        raise  # transport is unusable; nothing can be written back
    except TApplicationException as ex:
        logging.exception('TApplication exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = ex
    except Exception:
        logging.exception('Unexpected exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("postSlot", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_putSlot(self, seqid, iprot, oprot):
    """Handle one putSlot RPC: decode args, invoke the handler, write the reply."""
    parsed = putSlot_args()
    parsed.read(iprot)
    iprot.readMessageEnd()
    reply = putSlot_result()
    try:
        reply.success = self._handler.putSlot(parsed.request)
        reply_type = TMessageType.REPLY
    except TTransport.TTransportException:
        raise  # transport is unusable; nothing can be written back
    except TApplicationException as ex:
        logging.exception('TApplication exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = ex
    except Exception:
        logging.exception('Unexpected exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("putSlot", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_getSlots(self, seqid, iprot, oprot):
    """Handle one getSlots RPC: decode args, invoke the handler, write the reply."""
    parsed = getSlots_args()
    parsed.read(iprot)
    iprot.readMessageEnd()
    reply = getSlots_result()
    try:
        reply.success = self._handler.getSlots(parsed.request)
        reply_type = TMessageType.REPLY
    except TTransport.TTransportException:
        raise  # transport is unusable; nothing can be written back
    except TApplicationException as ex:
        logging.exception('TApplication exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = ex
    except Exception:
        logging.exception('Unexpected exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("getSlots", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_getSlot(self, seqid, iprot, oprot):
    """Handle one getSlot RPC: decode args, invoke the handler, write the reply."""
    parsed = getSlot_args()
    parsed.read(iprot)
    iprot.readMessageEnd()
    reply = getSlot_result()
    try:
        reply.success = self._handler.getSlot(parsed.request)
        reply_type = TMessageType.REPLY
    except TTransport.TTransportException:
        raise  # transport is unusable; nothing can be written back
    except TApplicationException as ex:
        logging.exception('TApplication exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = ex
    except Exception:
        logging.exception('Unexpected exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("getSlot", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_delSlot(self, seqid, iprot, oprot):
    """Handle one delSlot RPC: decode args, invoke the handler, write the reply."""
    parsed = delSlot_args()
    parsed.read(iprot)
    iprot.readMessageEnd()
    reply = delSlot_result()
    try:
        reply.success = self._handler.delSlot(parsed.request)
        reply_type = TMessageType.REPLY
    except TTransport.TTransportException:
        raise  # transport is unusable; nothing can be written back
    except TApplicationException as ex:
        logging.exception('TApplication exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = ex
    except Exception:
        logging.exception('Unexpected exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("delSlot", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_train(self, seqid, iprot, oprot):
    """Handle one train RPC: decode args, invoke the handler, write the reply."""
    parsed = train_args()
    parsed.read(iprot)
    iprot.readMessageEnd()
    reply = train_result()
    try:
        reply.success = self._handler.train(parsed.request)
        reply_type = TMessageType.REPLY
    except TTransport.TTransportException:
        raise  # transport is unusable; nothing can be written back
    except TApplicationException as ex:
        logging.exception('TApplication exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = ex
    except Exception:
        logging.exception('Unexpected exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("train", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_status(self, seqid, iprot, oprot):
    """Handle one status RPC: decode args, invoke the handler, write the reply."""
    parsed = status_args()
    parsed.read(iprot)
    iprot.readMessageEnd()
    reply = status_result()
    try:
        reply.success = self._handler.status(parsed.request)
        reply_type = TMessageType.REPLY
    except TTransport.TTransportException:
        raise  # transport is unusable; nothing can be written back
    except TApplicationException as ex:
        logging.exception('TApplication exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = ex
    except Exception:
        logging.exception('Unexpected exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("status", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_devver(self, seqid, iprot, oprot):
    """Handle one devver RPC: decode args, invoke the handler, write the reply."""
    parsed = devver_args()
    parsed.read(iprot)
    iprot.readMessageEnd()
    reply = devver_result()
    try:
        reply.success = self._handler.devver(parsed.request)
        reply_type = TMessageType.REPLY
    except TTransport.TTransportException:
        raise  # transport is unusable; nothing can be written back
    except TApplicationException as ex:
        logging.exception('TApplication exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = ex
    except Exception:
        logging.exception('Unexpected exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("devver", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_prover(self, seqid, iprot, oprot):
    """Handle one prover RPC: decode args, invoke the handler, write the reply."""
    parsed = prover_args()
    parsed.read(iprot)
    iprot.readMessageEnd()
    reply = prover_result()
    try:
        reply.success = self._handler.prover(parsed.request)
        reply_type = TMessageType.REPLY
    except TTransport.TTransportException:
        raise  # transport is unusable; nothing can be written back
    except TApplicationException as ex:
        logging.exception('TApplication exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = ex
    except Exception:
        logging.exception('Unexpected exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("prover", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_version(self, seqid, iprot, oprot):
    """Handle one version RPC: decode args, invoke the handler, write the reply."""
    parsed = version_args()
    parsed.read(iprot)
    iprot.readMessageEnd()
    reply = version_result()
    try:
        reply.success = self._handler.version(parsed.request)
        reply_type = TMessageType.REPLY
    except TTransport.TTransportException:
        raise  # transport is unusable; nothing can be written back
    except TApplicationException as ex:
        logging.exception('TApplication exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = ex
    except Exception:
        logging.exception('Unexpected exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("version", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_chat(self, seqid, iprot, oprot):
    """Handle one chat RPC: decode args, invoke the handler, write the reply."""
    parsed = chat_args()
    parsed.read(iprot)
    iprot.readMessageEnd()
    reply = chat_result()
    try:
        reply.success = self._handler.chat(parsed.request)
        reply_type = TMessageType.REPLY
    except TTransport.TTransportException:
        raise  # transport is unusable; nothing can be written back
    except TApplicationException as ex:
        logging.exception('TApplication exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = ex
    except Exception:
        logging.exception('Unexpected exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("chat", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_online(self, seqid, iprot, oprot):
    """Handle one online RPC: decode args, invoke the handler, write the reply."""
    parsed = online_args()
    parsed.read(iprot)
    iprot.readMessageEnd()
    reply = online_result()
    try:
        reply.success = self._handler.online(parsed.request)
        reply_type = TMessageType.REPLY
    except TTransport.TTransportException:
        raise  # transport is unusable; nothing can be written back
    except TApplicationException as ex:
        logging.exception('TApplication exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = ex
    except Exception:
        logging.exception('Unexpected exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("online", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_offline(self, seqid, iprot, oprot):
    """Handle one offline RPC: decode args, invoke the handler, write the reply."""
    parsed = offline_args()
    parsed.read(iprot)
    iprot.readMessageEnd()
    reply = offline_result()
    try:
        reply.success = self._handler.offline(parsed.request)
        reply_type = TMessageType.REPLY
    except TTransport.TTransportException:
        raise  # transport is unusable; nothing can be written back
    except TApplicationException as ex:
        logging.exception('TApplication exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = ex
    except Exception:
        logging.exception('Unexpected exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("offline", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_putSession(self, seqid, iprot, oprot):
    """Handle one putSession RPC: decode args, invoke the handler, write the reply."""
    parsed = putSession_args()
    parsed.read(iprot)
    iprot.readMessageEnd()
    reply = putSession_result()
    try:
        reply.success = self._handler.putSession(parsed.request)
        reply_type = TMessageType.REPLY
    except TTransport.TTransportException:
        raise  # transport is unusable; nothing can be written back
    except TApplicationException as ex:
        logging.exception('TApplication exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = ex
    except Exception:
        logging.exception('Unexpected exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("putSession", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_getSession(self, seqid, iprot, oprot):
    """Handle one getSession RPC: decode args, invoke the handler, write the reply."""
    parsed = getSession_args()
    parsed.read(iprot)
    iprot.readMessageEnd()
    reply = getSession_result()
    try:
        reply.success = self._handler.getSession(parsed.request)
        reply_type = TMessageType.REPLY
    except TTransport.TTransportException:
        raise  # transport is unusable; nothing can be written back
    except TApplicationException as ex:
        logging.exception('TApplication exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = ex
    except Exception:
        logging.exception('Unexpected exception in handler')
        reply_type = TMessageType.EXCEPTION
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("getSession", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
# HELPER FUNCTIONS AND STRUCTURES
class postCustomDict_args(object):
    """Thrift argument wrapper for the postCustomDict call.

    Attributes:
     - request: the Data struct sent by the caller (field id 1)
    """

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        # Delegate to the C-accelerated decoder when it is available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.STRUCT:
                self.request = Data()
                self.request.read(iprot)
            else:
                # Unknown or mistyped field: consume and ignore it.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct onto *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('postCustomDict_args')
        if self.request is not None:
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


all_structs.append(postCustomDict_args)
postCustomDict_args.thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'request', [Data, None], None, ),  # 1
)
class postCustomDict_result(object):
    """Thrift result wrapper for the postCustomDict call.

    Attributes:
     - success: the Data struct returned by the handler (field id 0)
    """

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        # Delegate to the C-accelerated decoder when it is available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0 and ftype == TType.STRUCT:
                self.success = Data()
                self.success.read(iprot)
            else:
                # Unknown or mistyped field: consume and ignore it.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct onto *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('postCustomDict_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


all_structs.append(postCustomDict_result)
postCustomDict_result.thrift_spec = (
    (0, TType.STRUCT, 'success', [Data, None], None, ),  # 0
)
class putCustomDict_args(object):
    """Thrift argument wrapper for the putCustomDict call.

    Attributes:
     - request: the Data struct sent by the caller (field id 1)
    """

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        # Delegate to the C-accelerated decoder when it is available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.STRUCT:
                self.request = Data()
                self.request.read(iprot)
            else:
                # Unknown or mistyped field: consume and ignore it.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct onto *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('putCustomDict_args')
        if self.request is not None:
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


all_structs.append(putCustomDict_args)
putCustomDict_args.thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'request', [Data, None], None, ),  # 1
)
class putCustomDict_result(object):
    """Thrift result wrapper for the putCustomDict call.

    Attributes:
     - success: the Data struct returned by the handler (field id 0)
    """

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        # Delegate to the C-accelerated decoder when it is available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0 and ftype == TType.STRUCT:
                self.success = Data()
                self.success.read(iprot)
            else:
                # Unknown or mistyped field: consume and ignore it.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct onto *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('putCustomDict_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


all_structs.append(putCustomDict_result)
putCustomDict_result.thrift_spec = (
    (0, TType.STRUCT, 'success', [Data, None], None, ),  # 0
)
class getCustomDicts_args(object):
    """Thrift argument wrapper for the getCustomDicts call.

    Attributes:
     - request: the Data struct sent by the caller (field id 1)
    """

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        # Delegate to the C-accelerated decoder when it is available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.STRUCT:
                self.request = Data()
                self.request.read(iprot)
            else:
                # Unknown or mistyped field: consume and ignore it.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct onto *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('getCustomDicts_args')
        if self.request is not None:
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


all_structs.append(getCustomDicts_args)
getCustomDicts_args.thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'request', [Data, None], None, ),  # 1
)
class getCustomDicts_result(object):
    """Thrift result wrapper for the getCustomDicts call.

    Attributes:
     - success: the Data struct returned by the handler (field id 0)
    """

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        # Delegate to the C-accelerated decoder when it is available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0 and ftype == TType.STRUCT:
                self.success = Data()
                self.success.read(iprot)
            else:
                # Unknown or mistyped field: consume and ignore it.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct onto *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('getCustomDicts_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


all_structs.append(getCustomDicts_result)
getCustomDicts_result.thrift_spec = (
    (0, TType.STRUCT, 'success', [Data, None], None, ),  # 0
)
class getCustomDict_args(object):
    """
    Argument struct for the getCustomDict RPC.

    Attributes:
     - request: Data struct carrying the request payload (Thrift field id 1).
    """
    def __init__(self, request=None,):
        self.request = request
    def read(self, iprot):
        # Fast path: delegate to the accelerated C decoder when the
        # protocol/transport support it and a thrift_spec is available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Pure-Python fallback: walk serialized fields until the STOP marker.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = Data()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)  # wire type mismatch: discard value
            else:
                iprot.skip(ftype)  # unknown field id: skip (forward compatible)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: delegate to the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('getCustomDict_args')
        if self.request is not None:  # unset (None) fields are not serialized
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields: nothing to validate.
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        # Structural equality: same class and identical attribute dicts.
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
all_structs.append(getCustomDict_args)
# thrift_spec drives the fast C (de)serializer; tuple index == Thrift field id.
getCustomDict_args.thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'request', [Data, None], None, ),  # 1
)
class getCustomDict_result(object):
    """
    Result struct for the getCustomDict RPC.

    Attributes:
     - success: Data struct returned by the server (Thrift field id 0).
    """
    def __init__(self, success=None,):
        self.success = success
    def read(self, iprot):
        # Fast path: delegate to the accelerated C decoder when the
        # protocol/transport support it and a thrift_spec is available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Pure-Python fallback: walk serialized fields until the STOP marker.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = Data()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)  # wire type mismatch: discard value
            else:
                iprot.skip(ftype)  # unknown field id: skip (forward compatible)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: delegate to the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('getCustomDict_result')
        if self.success is not None:  # unset (None) fields are not serialized
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields: nothing to validate.
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        # Structural equality: same class and identical attribute dicts.
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
all_structs.append(getCustomDict_result)
# thrift_spec drives the fast C (de)serializer; tuple index == Thrift field id.
getCustomDict_result.thrift_spec = (
    (0, TType.STRUCT, 'success', [Data, None], None, ),  # 0
)
class delCustomDict_args(object):
    """
    Argument struct for the delCustomDict RPC.

    Attributes:
     - request: Data struct carrying the request payload (Thrift field id 1).
    """
    def __init__(self, request=None,):
        self.request = request
    def read(self, iprot):
        # Fast path: delegate to the accelerated C decoder when the
        # protocol/transport support it and a thrift_spec is available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Pure-Python fallback: walk serialized fields until the STOP marker.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = Data()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)  # wire type mismatch: discard value
            else:
                iprot.skip(ftype)  # unknown field id: skip (forward compatible)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: delegate to the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('delCustomDict_args')
        if self.request is not None:  # unset (None) fields are not serialized
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields: nothing to validate.
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        # Structural equality: same class and identical attribute dicts.
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
all_structs.append(delCustomDict_args)
# thrift_spec drives the fast C (de)serializer; tuple index == Thrift field id.
delCustomDict_args.thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'request', [Data, None], None, ),  # 1
)
class delCustomDict_result(object):
    """
    Result struct for the delCustomDict RPC.

    Attributes:
     - success: Data struct returned by the server (Thrift field id 0).
    """
    def __init__(self, success=None,):
        self.success = success
    def read(self, iprot):
        # Fast path: delegate to the accelerated C decoder when the
        # protocol/transport support it and a thrift_spec is available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Pure-Python fallback: walk serialized fields until the STOP marker.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = Data()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)  # wire type mismatch: discard value
            else:
                iprot.skip(ftype)  # unknown field id: skip (forward compatible)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: delegate to the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('delCustomDict_result')
        if self.success is not None:  # unset (None) fields are not serialized
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields: nothing to validate.
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        # Structural equality: same class and identical attribute dicts.
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
all_structs.append(delCustomDict_result)
# thrift_spec drives the fast C (de)serializer; tuple index == Thrift field id.
delCustomDict_result.thrift_spec = (
    (0, TType.STRUCT, 'success', [Data, None], None, ),  # 0
)
class postSysDict_args(object):
    """
    Argument struct for the postSysDict RPC.

    Attributes:
     - request: Data struct carrying the request payload (Thrift field id 1).
    """
    def __init__(self, request=None,):
        self.request = request
    def read(self, iprot):
        # Fast path: delegate to the accelerated C decoder when the
        # protocol/transport support it and a thrift_spec is available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Pure-Python fallback: walk serialized fields until the STOP marker.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = Data()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)  # wire type mismatch: discard value
            else:
                iprot.skip(ftype)  # unknown field id: skip (forward compatible)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: delegate to the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('postSysDict_args')
        if self.request is not None:  # unset (None) fields are not serialized
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields: nothing to validate.
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        # Structural equality: same class and identical attribute dicts.
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
all_structs.append(postSysDict_args)
# thrift_spec drives the fast C (de)serializer; tuple index == Thrift field id.
postSysDict_args.thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'request', [Data, None], None, ),  # 1
)
class postSysDict_result(object):
    """
    Result struct for the postSysDict RPC.

    Attributes:
     - success: Data struct returned by the server (Thrift field id 0).
    """
    def __init__(self, success=None,):
        self.success = success
    def read(self, iprot):
        # Fast path: delegate to the accelerated C decoder when the
        # protocol/transport support it and a thrift_spec is available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Pure-Python fallback: walk serialized fields until the STOP marker.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = Data()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)  # wire type mismatch: discard value
            else:
                iprot.skip(ftype)  # unknown field id: skip (forward compatible)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: delegate to the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('postSysDict_result')
        if self.success is not None:  # unset (None) fields are not serialized
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields: nothing to validate.
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        # Structural equality: same class and identical attribute dicts.
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
all_structs.append(postSysDict_result)
# thrift_spec drives the fast C (de)serializer; tuple index == Thrift field id.
postSysDict_result.thrift_spec = (
    (0, TType.STRUCT, 'success', [Data, None], None, ),  # 0
)
class putSysDict_args(object):
    """
    Argument struct for the putSysDict RPC.

    Attributes:
     - request: Data struct carrying the request payload (Thrift field id 1).
    """
    def __init__(self, request=None,):
        self.request = request
    def read(self, iprot):
        # Fast path: delegate to the accelerated C decoder when the
        # protocol/transport support it and a thrift_spec is available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Pure-Python fallback: walk serialized fields until the STOP marker.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = Data()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)  # wire type mismatch: discard value
            else:
                iprot.skip(ftype)  # unknown field id: skip (forward compatible)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: delegate to the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('putSysDict_args')
        if self.request is not None:  # unset (None) fields are not serialized
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields: nothing to validate.
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        # Structural equality: same class and identical attribute dicts.
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
all_structs.append(putSysDict_args)
# thrift_spec drives the fast C (de)serializer; tuple index == Thrift field id.
putSysDict_args.thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'request', [Data, None], None, ),  # 1
)
class putSysDict_result(object):
    """
    Result struct for the putSysDict RPC.

    Attributes:
     - success: Data struct returned by the server (Thrift field id 0).
    """
    def __init__(self, success=None,):
        self.success = success
    def read(self, iprot):
        # Fast path: delegate to the accelerated C decoder when the
        # protocol/transport support it and a thrift_spec is available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Pure-Python fallback: walk serialized fields until the STOP marker.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = Data()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)  # wire type mismatch: discard value
            else:
                iprot.skip(ftype)  # unknown field id: skip (forward compatible)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: delegate to the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('putSysDict_result')
        if self.success is not None:  # unset (None) fields are not serialized
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields: nothing to validate.
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        # Structural equality: same class and identical attribute dicts.
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
all_structs.append(putSysDict_result)
# thrift_spec drives the fast C (de)serializer; tuple index == Thrift field id.
putSysDict_result.thrift_spec = (
    (0, TType.STRUCT, 'success', [Data, None], None, ),  # 0
)
class getSysDicts_args(object):
    """
    Argument struct for the getSysDicts RPC.

    Attributes:
     - request: Data struct carrying the request payload (Thrift field id 1).
    """
    def __init__(self, request=None,):
        self.request = request
    def read(self, iprot):
        # Fast path: delegate to the accelerated C decoder when the
        # protocol/transport support it and a thrift_spec is available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Pure-Python fallback: walk serialized fields until the STOP marker.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = Data()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)  # wire type mismatch: discard value
            else:
                iprot.skip(ftype)  # unknown field id: skip (forward compatible)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: delegate to the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('getSysDicts_args')
        if self.request is not None:  # unset (None) fields are not serialized
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields: nothing to validate.
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        # Structural equality: same class and identical attribute dicts.
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
all_structs.append(getSysDicts_args)
# thrift_spec drives the fast C (de)serializer; tuple index == Thrift field id.
getSysDicts_args.thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'request', [Data, None], None, ),  # 1
)
class getSysDicts_result(object):
    """
    Result struct for the getSysDicts RPC.

    Attributes:
     - success: Data struct returned by the server (Thrift field id 0).
    """
    def __init__(self, success=None,):
        self.success = success
    def read(self, iprot):
        # Fast path: delegate to the accelerated C decoder when the
        # protocol/transport support it and a thrift_spec is available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Pure-Python fallback: walk serialized fields until the STOP marker.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = Data()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)  # wire type mismatch: discard value
            else:
                iprot.skip(ftype)  # unknown field id: skip (forward compatible)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: delegate to the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('getSysDicts_result')
        if self.success is not None:  # unset (None) fields are not serialized
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields: nothing to validate.
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        # Structural equality: same class and identical attribute dicts.
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
all_structs.append(getSysDicts_result)
# thrift_spec drives the fast C (de)serializer; tuple index == Thrift field id.
getSysDicts_result.thrift_spec = (
    (0, TType.STRUCT, 'success', [Data, None], None, ),  # 0
)
class getSysDict_args(object):
    """
    Argument struct for the getSysDict RPC.

    Attributes:
     - request: Data struct carrying the request payload (Thrift field id 1).
    """
    def __init__(self, request=None,):
        self.request = request
    def read(self, iprot):
        # Fast path: delegate to the accelerated C decoder when the
        # protocol/transport support it and a thrift_spec is available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Pure-Python fallback: walk serialized fields until the STOP marker.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = Data()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)  # wire type mismatch: discard value
            else:
                iprot.skip(ftype)  # unknown field id: skip (forward compatible)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: delegate to the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('getSysDict_args')
        if self.request is not None:  # unset (None) fields are not serialized
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields: nothing to validate.
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        # Structural equality: same class and identical attribute dicts.
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
all_structs.append(getSysDict_args)
# thrift_spec drives the fast C (de)serializer; tuple index == Thrift field id.
getSysDict_args.thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'request', [Data, None], None, ),  # 1
)
class getSysDict_result(object):
    """
    Result struct for the getSysDict RPC.

    Attributes:
     - success: Data struct returned by the server (Thrift field id 0).
    """
    def __init__(self, success=None,):
        self.success = success
    def read(self, iprot):
        # Fast path: delegate to the accelerated C decoder when the
        # protocol/transport support it and a thrift_spec is available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Pure-Python fallback: walk serialized fields until the STOP marker.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = Data()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)  # wire type mismatch: discard value
            else:
                iprot.skip(ftype)  # unknown field id: skip (forward compatible)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: delegate to the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('getSysDict_result')
        if self.success is not None:  # unset (None) fields are not serialized
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields: nothing to validate.
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        # Structural equality: same class and identical attribute dicts.
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
all_structs.append(getSysDict_result)
# thrift_spec drives the fast C (de)serializer; tuple index == Thrift field id.
getSysDict_result.thrift_spec = (
    (0, TType.STRUCT, 'success', [Data, None], None, ),  # 0
)
class refSysDict_args(object):
    """
    Argument struct for the refSysDict RPC.

    Attributes:
     - request: Data struct carrying the request payload (Thrift field id 1).
    """
    def __init__(self, request=None,):
        self.request = request
    def read(self, iprot):
        # Fast path: delegate to the accelerated C decoder when the
        # protocol/transport support it and a thrift_spec is available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Pure-Python fallback: walk serialized fields until the STOP marker.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = Data()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)  # wire type mismatch: discard value
            else:
                iprot.skip(ftype)  # unknown field id: skip (forward compatible)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: delegate to the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('refSysDict_args')
        if self.request is not None:  # unset (None) fields are not serialized
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields: nothing to validate.
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        # Structural equality: same class and identical attribute dicts.
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
all_structs.append(refSysDict_args)
# thrift_spec drives the fast C (de)serializer; tuple index == Thrift field id.
refSysDict_args.thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'request', [Data, None], None, ),  # 1
)
class refSysDict_result(object):
    """
    Result struct for the refSysDict RPC.

    Attributes:
     - success: Data struct returned by the server (Thrift field id 0).
    """
    def __init__(self, success=None,):
        self.success = success
    def read(self, iprot):
        # Fast path: delegate to the accelerated C decoder when the
        # protocol/transport support it and a thrift_spec is available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Pure-Python fallback: walk serialized fields until the STOP marker.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = Data()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)  # wire type mismatch: discard value
            else:
                iprot.skip(ftype)  # unknown field id: skip (forward compatible)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: delegate to the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('refSysDict_result')
        if self.success is not None:  # unset (None) fields are not serialized
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields: nothing to validate.
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        # Structural equality: same class and identical attribute dicts.
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
all_structs.append(refSysDict_result)
# thrift_spec drives the fast C (de)serializer; tuple index == Thrift field id.
refSysDict_result.thrift_spec = (
    (0, TType.STRUCT, 'success', [Data, None], None, ),  # 0
)
class unrefSysDict_args(object):
    """
    Argument struct for the unrefSysDict RPC.

    Attributes:
     - request: Data struct carrying the request payload (Thrift field id 1).
    """
    def __init__(self, request=None,):
        self.request = request
    def read(self, iprot):
        # Fast path: delegate to the accelerated C decoder when the
        # protocol/transport support it and a thrift_spec is available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Pure-Python fallback: walk serialized fields until the STOP marker.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = Data()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)  # wire type mismatch: discard value
            else:
                iprot.skip(ftype)  # unknown field id: skip (forward compatible)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: delegate to the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('unrefSysDict_args')
        if self.request is not None:  # unset (None) fields are not serialized
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields: nothing to validate.
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        # Structural equality: same class and identical attribute dicts.
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
all_structs.append(unrefSysDict_args)
# thrift_spec drives the fast C (de)serializer; tuple index == Thrift field id.
unrefSysDict_args.thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'request', [Data, None], None, ),  # 1
)
class unrefSysDict_result(object):
    """
    Result struct for the unrefSysDict RPC.

    Attributes:
     - success: Data struct returned by the server (Thrift field id 0).
    """
    def __init__(self, success=None,):
        self.success = success
    def read(self, iprot):
        # Fast path: delegate to the accelerated C decoder when the
        # protocol/transport support it and a thrift_spec is available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Pure-Python fallback: walk serialized fields until the STOP marker.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = Data()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)  # wire type mismatch: discard value
            else:
                iprot.skip(ftype)  # unknown field id: skip (forward compatible)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: delegate to the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('unrefSysDict_result')
        if self.success is not None:  # unset (None) fields are not serialized
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields: nothing to validate.
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        # Structural equality: same class and identical attribute dicts.
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
all_structs.append(unrefSysDict_result)
# thrift_spec drives the fast C (de)serializer; tuple index == Thrift field id.
unrefSysDict_result.thrift_spec = (
    (0, TType.STRUCT, 'success', [Data, None], None, ),  # 0
)
class myDicts_args(object):
    """
    Argument struct for the myDicts RPC.

    Attributes:
     - request: Data struct carrying the request payload (Thrift field id 1).
    """
    def __init__(self, request=None,):
        self.request = request
    def read(self, iprot):
        # Fast path: delegate to the accelerated C decoder when the
        # protocol/transport support it and a thrift_spec is available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Pure-Python fallback: walk serialized fields until the STOP marker.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = Data()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)  # wire type mismatch: discard value
            else:
                iprot.skip(ftype)  # unknown field id: skip (forward compatible)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: delegate to the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('myDicts_args')
        if self.request is not None:  # unset (None) fields are not serialized
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields: nothing to validate.
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        # Structural equality: same class and identical attribute dicts.
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
all_structs.append(myDicts_args)
# thrift_spec drives the fast C (de)serializer; tuple index == Thrift field id.
myDicts_args.thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'request', [Data, None], None, ),  # 1
)
class myDicts_result(object):
    """
    Result struct for the myDicts RPC.

    Attributes:
     - success: Data struct returned by the server (Thrift field id 0).
    """
    def __init__(self, success=None,):
        self.success = success
    def read(self, iprot):
        # Fast path: delegate to the accelerated C decoder when the
        # protocol/transport support it and a thrift_spec is available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Pure-Python fallback: walk serialized fields until the STOP marker.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = Data()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)  # wire type mismatch: discard value
            else:
                iprot.skip(ftype)  # unknown field id: skip (forward compatible)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: delegate to the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('myDicts_result')
        if self.success is not None:  # unset (None) fields are not serialized
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields: nothing to validate.
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        # Structural equality: same class and identical attribute dicts.
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
all_structs.append(myDicts_result)
# thrift_spec drives the fast C (de)serializer; tuple index == Thrift field id.
myDicts_result.thrift_spec = (
    (0, TType.STRUCT, 'success', [Data, None], None, ),  # 0
)
class mySysdicts_args(object):
    """
    Argument struct for the mySysdicts RPC.

    Attributes:
     - request: Data struct carrying the request payload (Thrift field id 1).
    """
    def __init__(self, request=None,):
        self.request = request
    def read(self, iprot):
        # Fast path: delegate to the accelerated C decoder when the
        # protocol/transport support it and a thrift_spec is available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Pure-Python fallback: walk serialized fields until the STOP marker.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = Data()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)  # wire type mismatch: discard value
            else:
                iprot.skip(ftype)  # unknown field id: skip (forward compatible)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: delegate to the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('mySysdicts_args')
        if self.request is not None:  # unset (None) fields are not serialized
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields: nothing to validate.
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        # Structural equality: same class and identical attribute dicts.
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
all_structs.append(mySysdicts_args)
# thrift_spec drives the fast C (de)serializer; tuple index == Thrift field id.
mySysdicts_args.thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'request', [Data, None], None, ),  # 1
)
class mySysdicts_result(object):
    """
    Result struct for the mySysdicts RPC.

    Attributes:
     - success: Data struct returned by the server (Thrift field id 0).
    """
    def __init__(self, success=None,):
        self.success = success
    def read(self, iprot):
        # Fast path: delegate to the accelerated C decoder when the
        # protocol/transport support it and a thrift_spec is available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Pure-Python fallback: walk serialized fields until the STOP marker.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = Data()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)  # wire type mismatch: discard value
            else:
                iprot.skip(ftype)  # unknown field id: skip (forward compatible)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: delegate to the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('mySysdicts_result')
        if self.success is not None:  # unset (None) fields are not serialized
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields: nothing to validate.
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        # Structural equality: same class and identical attribute dicts.
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
all_structs.append(mySysdicts_result)
# thrift_spec drives the fast C (de)serializer; tuple index == Thrift field id.
mySysdicts_result.thrift_spec = (
    (0, TType.STRUCT, 'success', [Data, None], None, ),  # 0
)
class putDictWord_args(object):
    """
    Argument struct for the putDictWord RPC.

    Attributes:
     - request: Data struct carrying the request payload (Thrift field id 1).
    """
    def __init__(self, request=None,):
        self.request = request
    def read(self, iprot):
        # Fast path: delegate to the accelerated C decoder when the
        # protocol/transport support it and a thrift_spec is available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Pure-Python fallback: walk serialized fields until the STOP marker.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = Data()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)  # wire type mismatch: discard value
            else:
                iprot.skip(ftype)  # unknown field id: skip (forward compatible)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: delegate to the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('putDictWord_args')
        if self.request is not None:  # unset (None) fields are not serialized
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields: nothing to validate.
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        # Structural equality: same class and identical attribute dicts.
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
all_structs.append(putDictWord_args)
# thrift_spec drives the fast C (de)serializer; tuple index == Thrift field id.
putDictWord_args.thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'request', [Data, None], None, ),  # 1
)
class putDictWord_result(object):
    """Thrift-generated container for the ``putDictWord`` return value.

    Attributes:
     - success: optional ``Data`` payload (field 0)
    """

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        """Deserialize this struct in place from *iprot*."""
        # Fast path via the accelerated C decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            _, field_type, field_id = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 0 and field_type == TType.STRUCT:
                self.success = Data()
                self.success.read(iprot)
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('putDictWord_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
# Register for fast binary (de)serialization; field 0 ('success') is a Data struct.
all_structs.append(putDictWord_result)
putDictWord_result.thrift_spec = (
(0, TType.STRUCT, 'success', [Data, None], None, ),  # 0
)
class getDictWords_args(object):
    """Thrift-generated container for the ``getDictWords`` call arguments.

    Attributes:
     - request: optional ``Data`` payload (field 1)
    """

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        """Deserialize this struct in place from *iprot*."""
        # Fast path via the accelerated C decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            _, field_type, field_id = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 1 and field_type == TType.STRUCT:
                self.request = Data()
                self.request.read(iprot)
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('getDictWords_args')
        if self.request is not None:
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
# Register for fast binary (de)serialization and declare the wire layout:
# field 1 ('request') is a Data struct; index 0 is unused.
all_structs.append(getDictWords_args)
getDictWords_args.thrift_spec = (
None,  # 0
(1, TType.STRUCT, 'request', [Data, None], None, ),  # 1
)
class getDictWords_result(object):
    """Thrift-generated container for the ``getDictWords`` return value.

    Attributes:
     - success: optional ``Data`` payload (field 0)
    """

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        """Deserialize this struct in place from *iprot*."""
        # Fast path via the accelerated C decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            _, field_type, field_id = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 0 and field_type == TType.STRUCT:
                self.success = Data()
                self.success.read(iprot)
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('getDictWords_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
# Register for fast binary (de)serialization; field 0 ('success') is a Data struct.
all_structs.append(getDictWords_result)
getDictWords_result.thrift_spec = (
(0, TType.STRUCT, 'success', [Data, None], None, ),  # 0
)
class delDictWord_args(object):
    """Thrift-generated container for the ``delDictWord`` call arguments.

    Attributes:
     - request: optional ``Data`` payload (field 1)
    """

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        """Deserialize this struct in place from *iprot*."""
        # Fast path via the accelerated C decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            _, field_type, field_id = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 1 and field_type == TType.STRUCT:
                self.request = Data()
                self.request.read(iprot)
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('delDictWord_args')
        if self.request is not None:
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
# Register for fast binary (de)serialization and declare the wire layout:
# field 1 ('request') is a Data struct; index 0 is unused.
all_structs.append(delDictWord_args)
delDictWord_args.thrift_spec = (
None,  # 0
(1, TType.STRUCT, 'request', [Data, None], None, ),  # 1
)
class delDictWord_result(object):
    """Thrift-generated container for the ``delDictWord`` return value.

    Attributes:
     - success: optional ``Data`` payload (field 0)
    """

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        """Deserialize this struct in place from *iprot*."""
        # Fast path via the accelerated C decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            _, field_type, field_id = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 0 and field_type == TType.STRUCT:
                self.success = Data()
                self.success.read(iprot)
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('delDictWord_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
# Register for fast binary (de)serialization; field 0 ('success') is a Data struct.
all_structs.append(delDictWord_result)
delDictWord_result.thrift_spec = (
(0, TType.STRUCT, 'success', [Data, None], None, ),  # 0
)
class hasDictWord_args(object):
    """Thrift-generated container for the ``hasDictWord`` call arguments.

    Attributes:
     - request: optional ``Data`` payload (field 1)
    """

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        """Deserialize this struct in place from *iprot*."""
        # Fast path via the accelerated C decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            _, field_type, field_id = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 1 and field_type == TType.STRUCT:
                self.request = Data()
                self.request.read(iprot)
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('hasDictWord_args')
        if self.request is not None:
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
# Register for fast binary (de)serialization and declare the wire layout:
# field 1 ('request') is a Data struct; index 0 is unused.
all_structs.append(hasDictWord_args)
hasDictWord_args.thrift_spec = (
None,  # 0
(1, TType.STRUCT, 'request', [Data, None], None, ),  # 1
)
class hasDictWord_result(object):
    """Thrift-generated container for the ``hasDictWord`` return value.

    Attributes:
     - success: optional ``Data`` payload (field 0)
    """

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        """Deserialize this struct in place from *iprot*."""
        # Fast path via the accelerated C decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            _, field_type, field_id = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 0 and field_type == TType.STRUCT:
                self.success = Data()
                self.success.read(iprot)
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('hasDictWord_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
# Register for fast binary (de)serialization; field 0 ('success') is a Data struct.
all_structs.append(hasDictWord_result)
hasDictWord_result.thrift_spec = (
(0, TType.STRUCT, 'success', [Data, None], None, ),  # 0
)
class postIntent_args(object):
    """Thrift-generated container for the ``postIntent`` call arguments.

    Attributes:
     - request: optional ``Data`` payload (field 1)
    """

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        """Deserialize this struct in place from *iprot*."""
        # Fast path via the accelerated C decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            _, field_type, field_id = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 1 and field_type == TType.STRUCT:
                self.request = Data()
                self.request.read(iprot)
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('postIntent_args')
        if self.request is not None:
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
# Register for fast binary (de)serialization and declare the wire layout:
# field 1 ('request') is a Data struct; index 0 is unused.
all_structs.append(postIntent_args)
postIntent_args.thrift_spec = (
None,  # 0
(1, TType.STRUCT, 'request', [Data, None], None, ),  # 1
)
class postIntent_result(object):
    """Thrift-generated container for the ``postIntent`` return value.

    Attributes:
     - success: optional ``Data`` payload (field 0)
    """

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        """Deserialize this struct in place from *iprot*."""
        # Fast path via the accelerated C decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            _, field_type, field_id = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 0 and field_type == TType.STRUCT:
                self.success = Data()
                self.success.read(iprot)
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('postIntent_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
# Register for fast binary (de)serialization; field 0 ('success') is a Data struct.
all_structs.append(postIntent_result)
postIntent_result.thrift_spec = (
(0, TType.STRUCT, 'success', [Data, None], None, ),  # 0
)
class putIntent_args(object):
    """Thrift-generated container for the ``putIntent`` call arguments.

    Attributes:
     - request: optional ``Data`` payload (field 1)
    """

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        """Deserialize this struct in place from *iprot*."""
        # Fast path via the accelerated C decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            _, field_type, field_id = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 1 and field_type == TType.STRUCT:
                self.request = Data()
                self.request.read(iprot)
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('putIntent_args')
        if self.request is not None:
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
# Register for fast binary (de)serialization and declare the wire layout:
# field 1 ('request') is a Data struct; index 0 is unused.
all_structs.append(putIntent_args)
putIntent_args.thrift_spec = (
None,  # 0
(1, TType.STRUCT, 'request', [Data, None], None, ),  # 1
)
class putIntent_result(object):
    """Thrift-generated container for the ``putIntent`` return value.

    Attributes:
     - success: optional ``Data`` payload (field 0)
    """

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        """Deserialize this struct in place from *iprot*."""
        # Fast path via the accelerated C decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            _, field_type, field_id = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 0 and field_type == TType.STRUCT:
                self.success = Data()
                self.success.read(iprot)
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('putIntent_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
# Register for fast binary (de)serialization; field 0 ('success') is a Data struct.
all_structs.append(putIntent_result)
putIntent_result.thrift_spec = (
(0, TType.STRUCT, 'success', [Data, None], None, ),  # 0
)
class getIntents_args(object):
    """Thrift-generated container for the ``getIntents`` call arguments.

    Attributes:
     - request: optional ``Data`` payload (field 1)
    """

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        """Deserialize this struct in place from *iprot*."""
        # Fast path via the accelerated C decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            _, field_type, field_id = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 1 and field_type == TType.STRUCT:
                self.request = Data()
                self.request.read(iprot)
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('getIntents_args')
        if self.request is not None:
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
# Register for fast binary (de)serialization and declare the wire layout:
# field 1 ('request') is a Data struct; index 0 is unused.
all_structs.append(getIntents_args)
getIntents_args.thrift_spec = (
None,  # 0
(1, TType.STRUCT, 'request', [Data, None], None, ),  # 1
)
class getIntents_result(object):
    """Thrift-generated container for the ``getIntents`` return value.

    Attributes:
     - success: optional ``Data`` payload (field 0)
    """

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        """Deserialize this struct in place from *iprot*."""
        # Fast path via the accelerated C decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            _, field_type, field_id = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 0 and field_type == TType.STRUCT:
                self.success = Data()
                self.success.read(iprot)
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('getIntents_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
# Register for fast binary (de)serialization; field 0 ('success') is a Data struct.
all_structs.append(getIntents_result)
getIntents_result.thrift_spec = (
(0, TType.STRUCT, 'success', [Data, None], None, ),  # 0
)
class getIntent_args(object):
    """Thrift-generated container for the ``getIntent`` call arguments.

    Attributes:
     - request: optional ``Data`` payload (field 1)
    """

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        """Deserialize this struct in place from *iprot*."""
        # Fast path via the accelerated C decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            _, field_type, field_id = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 1 and field_type == TType.STRUCT:
                self.request = Data()
                self.request.read(iprot)
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('getIntent_args')
        if self.request is not None:
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
# Register for fast binary (de)serialization and declare the wire layout:
# field 1 ('request') is a Data struct; index 0 is unused.
all_structs.append(getIntent_args)
getIntent_args.thrift_spec = (
None,  # 0
(1, TType.STRUCT, 'request', [Data, None], None, ),  # 1
)
class getIntent_result(object):
    """Thrift-generated container for the ``getIntent`` return value.

    Attributes:
     - success: optional ``Data`` payload (field 0)
    """

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        """Deserialize this struct in place from *iprot*."""
        # Fast path via the accelerated C decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            _, field_type, field_id = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 0 and field_type == TType.STRUCT:
                self.success = Data()
                self.success.read(iprot)
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('getIntent_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
# Register for fast binary (de)serialization; field 0 ('success') is a Data struct.
all_structs.append(getIntent_result)
getIntent_result.thrift_spec = (
(0, TType.STRUCT, 'success', [Data, None], None, ),  # 0
)
class delIntent_args(object):
    """Thrift-generated container for the ``delIntent`` call arguments.

    Attributes:
     - request: optional ``Data`` payload (field 1)
    """

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        """Deserialize this struct in place from *iprot*."""
        # Fast path via the accelerated C decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            _, field_type, field_id = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 1 and field_type == TType.STRUCT:
                self.request = Data()
                self.request.read(iprot)
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('delIntent_args')
        if self.request is not None:
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
# Register for fast binary (de)serialization and declare the wire layout:
# field 1 ('request') is a Data struct; index 0 is unused.
all_structs.append(delIntent_args)
delIntent_args.thrift_spec = (
None,  # 0
(1, TType.STRUCT, 'request', [Data, None], None, ),  # 1
)
class delIntent_result(object):
    """Thrift-generated container for the ``delIntent`` return value.

    Attributes:
     - success: optional ``Data`` payload (field 0)
    """

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        """Deserialize this struct in place from *iprot*."""
        # Fast path via the accelerated C decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            _, field_type, field_id = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 0 and field_type == TType.STRUCT:
                self.success = Data()
                self.success.read(iprot)
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('delIntent_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
# Register for fast binary (de)serialization; field 0 ('success') is a Data struct.
all_structs.append(delIntent_result)
delIntent_result.thrift_spec = (
(0, TType.STRUCT, 'success', [Data, None], None, ),  # 0
)
class postUtter_args(object):
    """Thrift-generated container for the ``postUtter`` call arguments.

    Attributes:
     - request: optional ``Data`` payload (field 1)
    """

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        """Deserialize this struct in place from *iprot*."""
        # Fast path via the accelerated C decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            _, field_type, field_id = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 1 and field_type == TType.STRUCT:
                self.request = Data()
                self.request.read(iprot)
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('postUtter_args')
        if self.request is not None:
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
# Register for fast binary (de)serialization and declare the wire layout:
# field 1 ('request') is a Data struct; index 0 is unused.
all_structs.append(postUtter_args)
postUtter_args.thrift_spec = (
None,  # 0
(1, TType.STRUCT, 'request', [Data, None], None, ),  # 1
)
class postUtter_result(object):
    """Thrift-generated container for the ``postUtter`` return value.

    Attributes:
     - success: optional ``Data`` payload (field 0)
    """

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        """Deserialize this struct in place from *iprot*."""
        # Fast path via the accelerated C decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            _, field_type, field_id = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 0 and field_type == TType.STRUCT:
                self.success = Data()
                self.success.read(iprot)
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('postUtter_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
# Register for fast binary (de)serialization; field 0 ('success') is a Data struct.
all_structs.append(postUtter_result)
postUtter_result.thrift_spec = (
(0, TType.STRUCT, 'success', [Data, None], None, ),  # 0
)
class putUtter_args(object):
    """Thrift-generated container for the ``putUtter`` call arguments.

    Attributes:
     - request: optional ``Data`` payload (field 1)
    """

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        """Deserialize this struct in place from *iprot*."""
        # Fast path via the accelerated C decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            _, field_type, field_id = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 1 and field_type == TType.STRUCT:
                self.request = Data()
                self.request.read(iprot)
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('putUtter_args')
        if self.request is not None:
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
# Register for fast binary (de)serialization and declare the wire layout:
# field 1 ('request') is a Data struct; index 0 is unused.
all_structs.append(putUtter_args)
putUtter_args.thrift_spec = (
None,  # 0
(1, TType.STRUCT, 'request', [Data, None], None, ),  # 1
)
class putUtter_result(object):
    """Thrift-generated container for the ``putUtter`` return value.

    Attributes:
     - success: optional ``Data`` payload (field 0)
    """

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        """Deserialize this struct in place from *iprot*."""
        # Fast path via the accelerated C decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            _, field_type, field_id = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 0 and field_type == TType.STRUCT:
                self.success = Data()
                self.success.read(iprot)
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('putUtter_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
# Register for fast binary (de)serialization; field 0 ('success') is a Data struct.
all_structs.append(putUtter_result)
putUtter_result.thrift_spec = (
(0, TType.STRUCT, 'success', [Data, None], None, ),  # 0
)
class getUtters_args(object):
    """Thrift-generated container for the ``getUtters`` call arguments.

    Attributes:
     - request: optional ``Data`` payload (field 1)
    """

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        """Deserialize this struct in place from *iprot*."""
        # Fast path via the accelerated C decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            _, field_type, field_id = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 1 and field_type == TType.STRUCT:
                self.request = Data()
                self.request.read(iprot)
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('getUtters_args')
        if self.request is not None:
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
all_structs.append(getUtters_args)
getUtters_args.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'request', [Data, None], None, ), # 1
)
class getUtters_result(object):
    """
    Thrift-generated result struct for the ``getUtters`` RPC.

    Attributes:
     - success: deserialized as a ``Data`` struct (field id 0, the return value)
    """

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        # Fast path: C-accelerated decoder, usable only with a CReadableTransport.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Slow path: walk fields until the STOP marker.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = Data()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)  # wire type mismatch: discard field
            else:
                iprot.skip(ftype)  # unknown field id: skip for forward compatibility
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated encoder writes the whole struct at once.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('getUtters_result')
        if self.success is not None:  # unset fields are omitted from the wire
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


all_structs.append(getUtters_result)
# Field spec consumed by the fast-path codec: field 0 = success.
getUtters_result.thrift_spec = (
    (0, TType.STRUCT, 'success', [Data, None], None, ),  # 0
)
class getUtter_args(object):
    """
    Thrift-generated argument struct for the ``getUtter`` RPC.

    Attributes:
     - request: deserialized as a ``Data`` struct (field id 1)
    """

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        # Fast path: C-accelerated decoder, usable only with a CReadableTransport.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Slow path: walk fields until the STOP marker.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = Data()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)  # wire type mismatch: discard field
            else:
                iprot.skip(ftype)  # unknown field id: skip for forward compatibility
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated encoder writes the whole struct at once.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('getUtter_args')
        if self.request is not None:  # unset fields are omitted from the wire
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


all_structs.append(getUtter_args)
# Field spec consumed by the fast-path codec: slot 0 unused, field 1 = request.
getUtter_args.thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'request', [Data, None], None, ),  # 1
)
class getUtter_result(object):
    """
    Thrift-generated result struct for the ``getUtter`` RPC.

    Attributes:
     - success: deserialized as a ``Data`` struct (field id 0, the return value)
    """

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        # Fast path: C-accelerated decoder, usable only with a CReadableTransport.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Slow path: walk fields until the STOP marker.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = Data()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)  # wire type mismatch: discard field
            else:
                iprot.skip(ftype)  # unknown field id: skip for forward compatibility
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated encoder writes the whole struct at once.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('getUtter_result')
        if self.success is not None:  # unset fields are omitted from the wire
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


all_structs.append(getUtter_result)
# Field spec consumed by the fast-path codec: field 0 = success.
getUtter_result.thrift_spec = (
    (0, TType.STRUCT, 'success', [Data, None], None, ),  # 0
)
class delUtter_args(object):
    """
    Thrift-generated argument struct for the ``delUtter`` RPC.

    Attributes:
     - request: deserialized as a ``Data`` struct (field id 1)
    """

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        # Fast path: C-accelerated decoder, usable only with a CReadableTransport.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Slow path: walk fields until the STOP marker.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = Data()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)  # wire type mismatch: discard field
            else:
                iprot.skip(ftype)  # unknown field id: skip for forward compatibility
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated encoder writes the whole struct at once.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('delUtter_args')
        if self.request is not None:  # unset fields are omitted from the wire
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


all_structs.append(delUtter_args)
# Field spec consumed by the fast-path codec: slot 0 unused, field 1 = request.
delUtter_args.thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'request', [Data, None], None, ),  # 1
)
class delUtter_result(object):
    """
    Thrift-generated result struct for the ``delUtter`` RPC.

    Attributes:
     - success: deserialized as a ``Data`` struct (field id 0, the return value)
    """

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        # Fast path: C-accelerated decoder, usable only with a CReadableTransport.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Slow path: walk fields until the STOP marker.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = Data()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)  # wire type mismatch: discard field
            else:
                iprot.skip(ftype)  # unknown field id: skip for forward compatibility
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated encoder writes the whole struct at once.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('delUtter_result')
        if self.success is not None:  # unset fields are omitted from the wire
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


all_structs.append(delUtter_result)
# Field spec consumed by the fast-path codec: field 0 = success.
delUtter_result.thrift_spec = (
    (0, TType.STRUCT, 'success', [Data, None], None, ),  # 0
)
class postSlot_args(object):
    """
    Thrift-generated argument struct for the ``postSlot`` RPC.

    Attributes:
     - request: deserialized as a ``Data`` struct (field id 1)
    """

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        # Fast path: C-accelerated decoder, usable only with a CReadableTransport.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Slow path: walk fields until the STOP marker.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = Data()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)  # wire type mismatch: discard field
            else:
                iprot.skip(ftype)  # unknown field id: skip for forward compatibility
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated encoder writes the whole struct at once.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('postSlot_args')
        if self.request is not None:  # unset fields are omitted from the wire
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


all_structs.append(postSlot_args)
# Field spec consumed by the fast-path codec: slot 0 unused, field 1 = request.
postSlot_args.thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'request', [Data, None], None, ),  # 1
)
class postSlot_result(object):
    """
    Thrift-generated result struct for the ``postSlot`` RPC.

    Attributes:
     - success: deserialized as a ``Data`` struct (field id 0, the return value)
    """

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        # Fast path: C-accelerated decoder, usable only with a CReadableTransport.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Slow path: walk fields until the STOP marker.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = Data()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)  # wire type mismatch: discard field
            else:
                iprot.skip(ftype)  # unknown field id: skip for forward compatibility
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated encoder writes the whole struct at once.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('postSlot_result')
        if self.success is not None:  # unset fields are omitted from the wire
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


all_structs.append(postSlot_result)
# Field spec consumed by the fast-path codec: field 0 = success.
postSlot_result.thrift_spec = (
    (0, TType.STRUCT, 'success', [Data, None], None, ),  # 0
)
class putSlot_args(object):
    """
    Thrift-generated argument struct for the ``putSlot`` RPC.

    Attributes:
     - request: deserialized as a ``Data`` struct (field id 1)
    """

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        # Fast path: C-accelerated decoder, usable only with a CReadableTransport.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Slow path: walk fields until the STOP marker.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = Data()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)  # wire type mismatch: discard field
            else:
                iprot.skip(ftype)  # unknown field id: skip for forward compatibility
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated encoder writes the whole struct at once.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('putSlot_args')
        if self.request is not None:  # unset fields are omitted from the wire
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


all_structs.append(putSlot_args)
# Field spec consumed by the fast-path codec: slot 0 unused, field 1 = request.
putSlot_args.thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'request', [Data, None], None, ),  # 1
)
class putSlot_result(object):
    """
    Thrift-generated result struct for the ``putSlot`` RPC.

    Attributes:
     - success: deserialized as a ``Data`` struct (field id 0, the return value)
    """

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        # Fast path: C-accelerated decoder, usable only with a CReadableTransport.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Slow path: walk fields until the STOP marker.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = Data()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)  # wire type mismatch: discard field
            else:
                iprot.skip(ftype)  # unknown field id: skip for forward compatibility
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated encoder writes the whole struct at once.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('putSlot_result')
        if self.success is not None:  # unset fields are omitted from the wire
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


all_structs.append(putSlot_result)
# Field spec consumed by the fast-path codec: field 0 = success.
putSlot_result.thrift_spec = (
    (0, TType.STRUCT, 'success', [Data, None], None, ),  # 0
)
class getSlots_args(object):
    """
    Thrift-generated argument struct for the ``getSlots`` RPC.

    Attributes:
     - request: deserialized as a ``Data`` struct (field id 1)
    """

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        # Fast path: C-accelerated decoder, usable only with a CReadableTransport.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Slow path: walk fields until the STOP marker.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = Data()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)  # wire type mismatch: discard field
            else:
                iprot.skip(ftype)  # unknown field id: skip for forward compatibility
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated encoder writes the whole struct at once.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('getSlots_args')
        if self.request is not None:  # unset fields are omitted from the wire
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


all_structs.append(getSlots_args)
# Field spec consumed by the fast-path codec: slot 0 unused, field 1 = request.
getSlots_args.thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'request', [Data, None], None, ),  # 1
)
class getSlots_result(object):
    """
    Thrift-generated result struct for the ``getSlots`` RPC.

    Attributes:
     - success: deserialized as a ``Data`` struct (field id 0, the return value)
    """

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        # Fast path: C-accelerated decoder, usable only with a CReadableTransport.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Slow path: walk fields until the STOP marker.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = Data()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)  # wire type mismatch: discard field
            else:
                iprot.skip(ftype)  # unknown field id: skip for forward compatibility
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated encoder writes the whole struct at once.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('getSlots_result')
        if self.success is not None:  # unset fields are omitted from the wire
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


all_structs.append(getSlots_result)
# Field spec consumed by the fast-path codec: field 0 = success.
getSlots_result.thrift_spec = (
    (0, TType.STRUCT, 'success', [Data, None], None, ),  # 0
)
class getSlot_args(object):
    """
    Thrift-generated argument struct for the ``getSlot`` RPC.

    Attributes:
     - request: deserialized as a ``Data`` struct (field id 1)
    """

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        # Fast path: C-accelerated decoder, usable only with a CReadableTransport.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Slow path: walk fields until the STOP marker.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = Data()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)  # wire type mismatch: discard field
            else:
                iprot.skip(ftype)  # unknown field id: skip for forward compatibility
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated encoder writes the whole struct at once.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('getSlot_args')
        if self.request is not None:  # unset fields are omitted from the wire
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


all_structs.append(getSlot_args)
# Field spec consumed by the fast-path codec: slot 0 unused, field 1 = request.
getSlot_args.thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'request', [Data, None], None, ),  # 1
)
class getSlot_result(object):
    """
    Thrift-generated result struct for the ``getSlot`` RPC.

    Attributes:
     - success: deserialized as a ``Data`` struct (field id 0, the return value)
    """

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        # Fast path: C-accelerated decoder, usable only with a CReadableTransport.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Slow path: walk fields until the STOP marker.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = Data()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)  # wire type mismatch: discard field
            else:
                iprot.skip(ftype)  # unknown field id: skip for forward compatibility
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated encoder writes the whole struct at once.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('getSlot_result')
        if self.success is not None:  # unset fields are omitted from the wire
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


all_structs.append(getSlot_result)
# Field spec consumed by the fast-path codec: field 0 = success.
getSlot_result.thrift_spec = (
    (0, TType.STRUCT, 'success', [Data, None], None, ),  # 0
)
class delSlot_args(object):
    """
    Thrift-generated argument struct for the ``delSlot`` RPC.

    Attributes:
     - request: deserialized as a ``Data`` struct (field id 1)
    """

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        # Fast path: C-accelerated decoder, usable only with a CReadableTransport.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Slow path: walk fields until the STOP marker.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = Data()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)  # wire type mismatch: discard field
            else:
                iprot.skip(ftype)  # unknown field id: skip for forward compatibility
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated encoder writes the whole struct at once.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('delSlot_args')
        if self.request is not None:  # unset fields are omitted from the wire
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


all_structs.append(delSlot_args)
# Field spec consumed by the fast-path codec: slot 0 unused, field 1 = request.
delSlot_args.thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'request', [Data, None], None, ),  # 1
)
class delSlot_result(object):
    """
    Thrift-generated result struct for the ``delSlot`` RPC.

    Attributes:
     - success: deserialized as a ``Data`` struct (field id 0, the return value)
    """

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        # Fast path: C-accelerated decoder, usable only with a CReadableTransport.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Slow path: walk fields until the STOP marker.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = Data()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)  # wire type mismatch: discard field
            else:
                iprot.skip(ftype)  # unknown field id: skip for forward compatibility
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated encoder writes the whole struct at once.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('delSlot_result')
        if self.success is not None:  # unset fields are omitted from the wire
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


all_structs.append(delSlot_result)
# Field spec consumed by the fast-path codec: field 0 = success.
delSlot_result.thrift_spec = (
    (0, TType.STRUCT, 'success', [Data, None], None, ),  # 0
)
class train_args(object):
    """
    Thrift-generated argument struct for the ``train`` RPC.

    Attributes:
     - request: deserialized as a ``Data`` struct (field id 1)
    """

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        # Fast path: C-accelerated decoder, usable only with a CReadableTransport.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Slow path: walk fields until the STOP marker.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = Data()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)  # wire type mismatch: discard field
            else:
                iprot.skip(ftype)  # unknown field id: skip for forward compatibility
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated encoder writes the whole struct at once.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('train_args')
        if self.request is not None:  # unset fields are omitted from the wire
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


all_structs.append(train_args)
# Field spec consumed by the fast-path codec: slot 0 unused, field 1 = request.
train_args.thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'request', [Data, None], None, ),  # 1
)
class train_result(object):
    """
    Thrift-generated result struct for the ``train`` RPC.

    Attributes:
     - success: deserialized as a ``Data`` struct (field id 0, the return value)
    """

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        # Fast path: C-accelerated decoder, usable only with a CReadableTransport.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Slow path: walk fields until the STOP marker.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = Data()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)  # wire type mismatch: discard field
            else:
                iprot.skip(ftype)  # unknown field id: skip for forward compatibility
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated encoder writes the whole struct at once.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('train_result')
        if self.success is not None:  # unset fields are omitted from the wire
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


all_structs.append(train_result)
# Field spec consumed by the fast-path codec: field 0 = success.
train_result.thrift_spec = (
    (0, TType.STRUCT, 'success', [Data, None], None, ),  # 0
)
class status_args(object):
    """
    Thrift-generated argument struct for the ``status`` RPC.

    Attributes:
     - request: deserialized as a ``Data`` struct (field id 1)
    """

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        # Fast path: C-accelerated decoder, usable only with a CReadableTransport.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Slow path: walk fields until the STOP marker.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = Data()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)  # wire type mismatch: discard field
            else:
                iprot.skip(ftype)  # unknown field id: skip for forward compatibility
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated encoder writes the whole struct at once.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('status_args')
        if self.request is not None:  # unset fields are omitted from the wire
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


all_structs.append(status_args)
# Field spec consumed by the fast-path codec: slot 0 unused, field 1 = request.
status_args.thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'request', [Data, None], None, ),  # 1
)
class status_result(object):
    """
    Thrift-generated result struct for the ``status`` RPC.

    Attributes:
     - success: deserialized as a ``Data`` struct (field id 0, the return value)
    """

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        # Fast path: C-accelerated decoder, usable only with a CReadableTransport.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Slow path: walk fields until the STOP marker.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = Data()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)  # wire type mismatch: discard field
            else:
                iprot.skip(ftype)  # unknown field id: skip for forward compatibility
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated encoder writes the whole struct at once.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('status_result')
        if self.success is not None:  # unset fields are omitted from the wire
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


all_structs.append(status_result)
# Field spec consumed by the fast-path codec: field 0 = success.
status_result.thrift_spec = (
    (0, TType.STRUCT, 'success', [Data, None], None, ),  # 0
)
class devver_args(object):
    """
    Thrift-generated argument struct for the ``devver`` RPC.

    Attributes:
     - request: deserialized as a ``Data`` struct (field id 1)
    """

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        # Fast path: C-accelerated decoder, usable only with a CReadableTransport.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Slow path: walk fields until the STOP marker.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = Data()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)  # wire type mismatch: discard field
            else:
                iprot.skip(ftype)  # unknown field id: skip for forward compatibility
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated encoder writes the whole struct at once.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('devver_args')
        if self.request is not None:  # unset fields are omitted from the wire
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


all_structs.append(devver_args)
# Field spec consumed by the fast-path codec: slot 0 unused, field 1 = request.
devver_args.thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'request', [Data, None], None, ),  # 1
)
class devver_result(object):
    """
    Thrift-generated result struct for the ``devver`` RPC.

    Attributes:
     - success: deserialized as a ``Data`` struct (field id 0, the return value)
    """

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        # Fast path: C-accelerated decoder, usable only with a CReadableTransport.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Slow path: walk fields until the STOP marker.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = Data()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)  # wire type mismatch: discard field
            else:
                iprot.skip(ftype)  # unknown field id: skip for forward compatibility
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated encoder writes the whole struct at once.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('devver_result')
        if self.success is not None:  # unset fields are omitted from the wire
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


all_structs.append(devver_result)
# Field spec consumed by the fast-path codec: field 0 = success.
devver_result.thrift_spec = (
    (0, TType.STRUCT, 'success', [Data, None], None, ),  # 0
)
class prover_args(object):
    """
    Thrift-generated argument struct for the ``prover`` RPC.

    Attributes:
     - request: deserialized as a ``Data`` struct (field id 1)
    """

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        # Fast path: C-accelerated decoder, usable only with a CReadableTransport.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Slow path: walk fields until the STOP marker.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = Data()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)  # wire type mismatch: discard field
            else:
                iprot.skip(ftype)  # unknown field id: skip for forward compatibility
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated encoder writes the whole struct at once.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('prover_args')
        if self.request is not None:  # unset fields are omitted from the wire
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


all_structs.append(prover_args)
# Field spec consumed by the fast-path codec: slot 0 unused, field 1 = request.
prover_args.thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'request', [Data, None], None, ),  # 1
)
class prover_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = Data()
self.success.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('prover_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(prover_result)
prover_result.thrift_spec = (
(0, TType.STRUCT, 'success', [Data, None], None, ), # 0
)
class version_args(object):
"""
Attributes:
- request
"""
def __init__(self, request=None,):
self.request = request
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.request = Data()
self.request.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('version_args')
if self.request is not None:
oprot.writeFieldBegin('request', TType.STRUCT, 1)
self.request.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(version_args)
version_args.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'request', [Data, None], None, ), # 1
)
class version_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = Data()
self.success.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('version_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(version_result)
version_result.thrift_spec = (
(0, TType.STRUCT, 'success', [Data, None], None, ), # 0
)
class chat_args(object):
"""
Attributes:
- request
"""
def __init__(self, request=None,):
self.request = request
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.request = Data()
self.request.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('chat_args')
if self.request is not None:
oprot.writeFieldBegin('request', TType.STRUCT, 1)
self.request.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(chat_args)
chat_args.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'request', [Data, None], None, ), # 1
)
class chat_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = Data()
self.success.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('chat_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(chat_result)
chat_result.thrift_spec = (
(0, TType.STRUCT, 'success', [Data, None], None, ), # 0
)
class online_args(object):
"""
Attributes:
- request
"""
def __init__(self, request=None,):
self.request = request
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.request = Data()
self.request.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('online_args')
if self.request is not None:
oprot.writeFieldBegin('request', TType.STRUCT, 1)
self.request.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(online_args)
online_args.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'request', [Data, None], None, ), # 1
)
class online_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = Data()
self.success.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('online_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(online_result)
online_result.thrift_spec = (
(0, TType.STRUCT, 'success', [Data, None], None, ), # 0
)
class offline_args(object):
"""
Attributes:
- request
"""
def __init__(self, request=None,):
self.request = request
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.request = Data()
self.request.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('offline_args')
if self.request is not None:
oprot.writeFieldBegin('request', TType.STRUCT, 1)
self.request.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(offline_args)
offline_args.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'request', [Data, None], None, ), # 1
)
class offline_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = Data()
self.success.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('offline_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(offline_result)
offline_result.thrift_spec = (
(0, TType.STRUCT, 'success', [Data, None], None, ), # 0
)
class putSession_args(object):
"""
Attributes:
- request
"""
def __init__(self, request=None,):
self.request = request
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.request = Data()
self.request.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('putSession_args')
if self.request is not None:
oprot.writeFieldBegin('request', TType.STRUCT, 1)
self.request.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(putSession_args)
putSession_args.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'request', [Data, None], None, ), # 1
)
class putSession_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = Data()
self.success.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('putSession_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(putSession_result)
putSession_result.thrift_spec = (
(0, TType.STRUCT, 'success', [Data, None], None, ), # 0
)
class getSession_args(object):
"""
Attributes:
- request
"""
def __init__(self, request=None,):
self.request = request
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.request = Data()
self.request.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getSession_args')
if self.request is not None:
oprot.writeFieldBegin('request', TType.STRUCT, 1)
self.request.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getSession_args)
getSession_args.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'request', [Data, None], None, ), # 1
)
class getSession_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = Data()
self.success.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getSession_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getSession_result)
getSession_result.thrift_spec = (
(0, TType.STRUCT, 'success', [Data, None], None, ), # 0
)
fix_spec(all_structs)
del all_structs
| 31.854861
| 134
| 0.588643
| 26,094
| 255,253
| 5.506208
| 0.007396
| 0.016112
| 0.029002
| 0.014031
| 0.916557
| 0.890513
| 0.855838
| 0.84141
| 0.84141
| 0.84141
| 0
| 0.00217
| 0.308647
| 255,253
| 8,012
| 135
| 31.858837
| 0.812013
| 0.016905
| 0
| 0.856838
| 1
| 0
| 0.037949
| 0.00017
| 0
| 0
| 0
| 0
| 0
| 1
| 0.131657
| false
| 0.006903
| 0.001315
| 0.04142
| 0.244247
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ed351f79340c8cfb209e4c4cfaa7af66886fd466
| 883
|
py
|
Python
|
tests/stats_benchmark_params/continuous_categorical.py
|
fossabot/many
|
7f79b0bf1007e2fd412db9f5f5df29f487ebf2ae
|
[
"MIT"
] | 3
|
2020-09-10T05:06:19.000Z
|
2021-10-02T18:42:44.000Z
|
tests/stats_benchmark_params/continuous_categorical.py
|
fossabot/many
|
7f79b0bf1007e2fd412db9f5f5df29f487ebf2ae
|
[
"MIT"
] | 21
|
2020-10-29T18:32:46.000Z
|
2022-03-24T10:19:41.000Z
|
tests/stats_benchmark_params/continuous_categorical.py
|
fossabot/many
|
7f79b0bf1007e2fd412db9f5f5df29f487ebf2ae
|
[
"MIT"
] | 2
|
2020-10-29T18:02:50.000Z
|
2021-10-02T18:42:45.000Z
|
import many
b_num_cols = [8, 16, 32, 64, 128, 256, 512, 1024, 2048]
params = []
for b_num_col in b_num_cols:
params.append(
[
many.stats.mat_mwu_naive,
many.stats.mat_mwu,
1000,
100,
b_num_col,
"continuous",
"binary",
False,
False,
{"effect": "rank_biserial", "melt": False},
["effects", "pvals"],
True,
]
)
for b_num_col in b_num_cols:
params.append(
[
many.stats.mat_mwu_naive,
many.stats.mat_mwu_gpu,
1000,
100,
b_num_col,
"continuous",
"binary",
False,
False,
{"effect": "rank_biserial", "melt": False},
["effects", "pvals"],
True,
]
)
| 19.622222
| 55
| 0.423556
| 88
| 883
| 3.988636
| 0.409091
| 0.079772
| 0.079772
| 0.17094
| 0.854701
| 0.854701
| 0.854701
| 0.854701
| 0.854701
| 0.854701
| 0
| 0.079832
| 0.460929
| 883
| 44
| 56
| 20.068182
| 0.657563
| 0
| 0
| 0.702703
| 0
| 0
| 0.115515
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.027027
| 0
| 0.027027
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ed35d74fb3d8769757a2f169a5b031f3d18bbf0c
| 94
|
py
|
Python
|
tests/common/context.py
|
tobse80/rof-helpers
|
d6fa4bbb641c82bb69b73b8c9f06587a0d7bdd97
|
[
"MIT"
] | null | null | null |
tests/common/context.py
|
tobse80/rof-helpers
|
d6fa4bbb641c82bb69b73b8c9f06587a0d7bdd97
|
[
"MIT"
] | null | null | null |
tests/common/context.py
|
tobse80/rof-helpers
|
d6fa4bbb641c82bb69b73b8c9f06587a0d7bdd97
|
[
"MIT"
] | null | null | null |
# For testing a local package, import it via tests.context
import tests.context
import common
| 23.5
| 58
| 0.808511
| 15
| 94
| 5.066667
| 0.733333
| 0.315789
| 0.473684
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148936
| 94
| 3
| 59
| 31.333333
| 0.95
| 0.595745
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ed49d8716dc80323b0c148a2c51240883e8202ee
| 1,462
|
py
|
Python
|
tests/test_textutils.py
|
lsst-sqre/templatekit
|
10d9e64056aa7e103d91ec78a4d3feb70c3eb5e2
|
[
"MIT"
] | 1
|
2020-02-06T18:26:51.000Z
|
2020-02-06T18:26:51.000Z
|
tests/test_textutils.py
|
lsst-sqre/templatekit
|
10d9e64056aa7e103d91ec78a4d3feb70c3eb5e2
|
[
"MIT"
] | 6
|
2019-04-16T18:34:21.000Z
|
2022-03-15T22:36:32.000Z
|
tests/test_textutils.py
|
lsst-sqre/templatekit
|
10d9e64056aa7e103d91ec78a4d3feb70c3eb5e2
|
[
"MIT"
] | null | null | null |
"""Tests for the templatekit.textutils module.
"""
from templatekit.textutils import reformat_content_lines
def test_reformat_content_lines() -> None:
"""Test reformatting text (a Python comment block)."""
sample = "Line 1\n" "Line 2\n"
expected = "# Line 1\n" "# Line 2\n"
result = reformat_content_lines(sample, "# {}")
assert result == expected
def test_reformat_content_lines_no_final_newline() -> None:
"""Same as `test_reformat_content_lines` except the original content
lacks a final newline.
"""
sample = "Line 1\n" "Line 2"
expected = "# Line 1\n" "# Line 2\n"
result = reformat_content_lines(sample, "# {}")
assert result == expected
def test_reformat_content_lines_header_footer() -> None:
"""Test reformatting text and including a header and footer (like a C++
comment block).
"""
sample = "Line 1\n" "Line 2\n"
expected = "/*\n" " * Line 1\n" " * Line 2\n" " */\n"
result = reformat_content_lines(sample, " * {}", header="/*", footer=" */")
assert result == expected
def test_reformat_content_lines_header_footer_no_final_newline() -> None:
"""Same as `test_reformat_content_lines_header_footer` except the original
content lacks a final newline.
"""
sample = "Line 1\n" "Line 2"
expected = "/*\n" " * Line 1\n" " * Line 2\n" " */\n"
result = reformat_content_lines(sample, " * {}", header="/*", footer=" */")
assert result == expected
| 34
| 79
| 0.649795
| 192
| 1,462
| 4.739583
| 0.21875
| 0.181319
| 0.241758
| 0.087912
| 0.806593
| 0.776923
| 0.763736
| 0.763736
| 0.763736
| 0.763736
| 0
| 0.01371
| 0.201778
| 1,462
| 42
| 80
| 34.809524
| 0.766067
| 0.252394
| 0
| 0.761905
| 0
| 0
| 0.182868
| 0
| 0
| 0
| 0
| 0
| 0.190476
| 1
| 0.190476
| false
| 0
| 0.047619
| 0
| 0.238095
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ed6bbfcada349062b9c36c879ae8a56086f8637d
| 45
|
py
|
Python
|
careless/models/priors/__init__.py
|
JBGreisman/careless
|
8f6c0859973757d11b26b65d9dc51d443030aa70
|
[
"MIT"
] | 5
|
2021-02-08T16:34:38.000Z
|
2022-03-25T19:16:09.000Z
|
careless/models/priors/__init__.py
|
JBGreisman/careless
|
8f6c0859973757d11b26b65d9dc51d443030aa70
|
[
"MIT"
] | 28
|
2021-01-15T21:31:40.000Z
|
2022-03-30T21:06:54.000Z
|
careless/models/priors/__init__.py
|
JBGreisman/careless
|
8f6c0859973757d11b26b65d9dc51d443030aa70
|
[
"MIT"
] | 5
|
2021-02-12T18:43:58.000Z
|
2022-02-02T21:38:56.000Z
|
from . import empirical
from . import wilson
| 15
| 23
| 0.777778
| 6
| 45
| 5.833333
| 0.666667
| 0.571429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.177778
| 45
| 2
| 24
| 22.5
| 0.945946
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
9c0c15bcea43f2dee40c6ff8945df5ead39447f3
| 41,348
|
py
|
Python
|
mlonmcu/feature/features.py
|
PhilippvK/mlonmcu
|
6b5ed9b2abe8d3caa18c20a604547513e8097b49
|
[
"Apache-2.0"
] | null | null | null |
mlonmcu/feature/features.py
|
PhilippvK/mlonmcu
|
6b5ed9b2abe8d3caa18c20a604547513e8097b49
|
[
"Apache-2.0"
] | null | null | null |
mlonmcu/feature/features.py
|
PhilippvK/mlonmcu
|
6b5ed9b2abe8d3caa18c20a604547513e8097b49
|
[
"Apache-2.0"
] | null | null | null |
#
# Copyright (c) 2022 TUM Department of Electrical and Computer Engineering.
#
# This file is part of MLonMCU.
# See https://github.com/tum-ei-eda/mlonmcu.git for further info.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Definition of MLonMCU features and the feature registry."""
from pathlib import Path
from .feature import (
BackendFeature,
FrameworkFeature,
PlatformFeature,
FrontendFeature,
TargetFeature,
SetupFeature,
RunFeature,
FeatureBase,
)
from mlonmcu.utils import is_power_of_two
def filter_none(data):
"""Helper function which drop dict items with a None value."""
assert isinstance(data, dict), "Dict only"
out = {key: value for key, value in data.items() if value is not None}
return out
REGISTERED_FEATURES = {}
def register_feature(name):
"""Decorator for adding a feature to the global registry."""
def real_decorator(obj):
REGISTERED_FEATURES[name] = obj
return real_decorator
def get_available_feature_names(feature_type=None):
"""Utility for getting feature names."""
ret = []
if feature_type is None:
return REGISTERED_FEATURES.keys()
for name, feature in REGISTERED_FEATURES.items():
if feature_type in list(feature.types()):
ret.append(name)
return ret
def get_available_features(feature_type=None, feature_name=None):
"""Utility for looking up features."""
names = get_available_feature_names(feature_type=feature_type)
return [REGISTERED_FEATURES[name] for name in names if feature_name is None or name == feature_name]
def get_matching_features(features, feature_type):
return [feature for feature in features if feature_type in feature.types()]
@register_feature("debug_arena")
class DebugArena(BackendFeature, PlatformFeature):
"""Enable verbose printing of arena usage for debugging."""
def __init__(self, config=None):
super().__init__("debug_arena", config=config)
def get_backend_config(self, backend):
assert backend in [
"tvmaot",
"tvmcg",
"tvmrt",
], f"Unsupported feature '{self.name}' for backend '{backend}'"
# TODO: TFLM also compatible?
return {f"{backend}.debug_arena": self.enabled}
# def get_platform_config(self):
# return {"mlif.debug_arena": True}
def get_cmake_args(self):
val = "ON" if self.enabled else "OFF"
return [f"-DDEBUG_ARENA={val}"]
@register_feature("validate")
class Validate(FrontendFeature, PlatformFeature):
"""Enable validaton of inout and output tensors."""
DEFAULTS = {
**FeatureBase.DEFAULTS,
"allow_missing": True,
}
def __init__(self, config=None):
super().__init__("validate", config=config)
@property
def allow_missing(self):
return bool(self.config["allow_missing"])
def get_frontend_config(self, frontend):
if not self.allow_missing:
raise NotImplementedError
return {f"{frontend}.use_inout_data": True}
def get_platform_config(self, platform):
assert platform == "mlif", f"Unsupported feature '{self.name}' for platform '{platform}'"
return {f"{platform}.ignore_data": False}
# def get_cmake_args(self):
# pass
@register_feature("muriscvnn")
class Muriscvnn(SetupFeature, FrameworkFeature):
"""MuriscvNN CMSIS-NN wrappers for TFLite Micro"""
REQUIRED = ["muriscvnn.lib", "muriscvnn.inc_dir"]
def __init__(self, config=None):
super().__init__("muriscvnn", config=config)
@property
def muriscvnn_lib(self):
return str(self.config["muriscvnn.lib"])
@property
def muriscvnn_inc_dir(self):
return str(self.config["muriscvnn.inc_dir"])
def add_framework_config(self, framework, config):
assert framework == "tflite", f"Unsupported feature '{self.name}' for framework '{framework}'"
if f"{framework}.optimized_kernel" in config and config[f"{framework}.optimized_kernel"] not in [
None,
"cmsis_nn",
]:
RuntimeError(f"There is already a optimized_kernel selected for framework '{framework}'")
else:
config[f"{framework}.optimized_kernel"] = "cmsis_nn"
libs = config.get(f"{framework}.optimized_kernel_libs", [])
libs.append(self.muriscvnn_lib)
incs = config.get(f"{framework}.optimized_kernel_inc_dirs", [])
incs.append(self.muriscvnn_inc_dir)
config[f"{framework}.optimized_kernel_libs"] = libs
config[f"{framework}.optimized_kernel_inc_dirs"] = incs
def get_required_cache_flags(self):
ret = {}
ret["tflmc.exe"] = ["muriscvnn"]
return ret
@register_feature("cmsisnn")
class Cmsisnn(SetupFeature, FrameworkFeature):
    """CMSIS-NN kernels for TFLite Micro/TVM."""

    REQUIRED = ["cmsisnn.lib", "cmsisnn.dir"]

    def __init__(self, config=None):
        super().__init__("cmsisnn", config=config)

    @property
    def cmsisnn_lib(self):
        """Path to the prebuilt CMSIS-NN library."""
        return str(self.config["cmsisnn.lib"])

    @property
    def cmsisnn_dir(self):
        """Path to the root of the CMSIS source tree."""
        return str(self.config["cmsisnn.dir"])

    def add_framework_config(self, framework, config):
        """Select CMSIS-NN as the optimized kernel implementation for TFLite Micro.

        Raises RuntimeError if a conflicting optimized_kernel is already chosen.
        """
        assert framework == "tflite", f"Unsupported feature '{self.name}' for framework '{framework}'"
        if config.get(f"{framework}.optimized_kernel") not in [None, "cmsis_nn"]:
            # BUGFIX: the exception was previously constructed but never raised.
            raise RuntimeError(f"There is already a optimized_kernel selected for framework '{framework}'")
        config[f"{framework}.optimized_kernel"] = "cmsis_nn"
        libs = config.get(f"{framework}.optimized_kernel_libs", [])
        libs.append(self.cmsisnn_lib)
        incs = config.get(f"{framework}.optimized_kernel_inc_dirs", [])
        # Core/NN/DSP headers are all required by the CMSIS-NN kernels.
        incs.extend(
            [
                self.cmsisnn_dir,
                str(Path(self.cmsisnn_dir) / "CMSIS" / "Core" / "Include"),
                str(Path(self.cmsisnn_dir) / "CMSIS" / "NN" / "Include"),
                str(Path(self.cmsisnn_dir) / "CMSIS" / "DSP" / "Include"),
            ]
        )
        config[f"{framework}.optimized_kernel_libs"] = libs
        config[f"{framework}.optimized_kernel_inc_dirs"] = incs

    def get_required_cache_flags(self):
        """The tflmc executable has to be built with cmsisnn support."""
        return {"tflmc.exe": ["cmsisnn"]}
@register_feature("cmsisnnbyoc")
class CmsisnnByoc(SetupFeature, FrameworkFeature, BackendFeature):
    """CMSIS-NN kernels for TVM using BYOC wrappers."""

    REQUIRED = ["cmsisnn.lib", "cmsisnn.dir"]

    def __init__(self, config=None):
        super().__init__("cmsisnnbyoc", config=config)

    @property
    def cmsisnn_lib(self):
        """Path to the prebuilt CMSIS-NN library."""
        return str(self.config["cmsisnn.lib"])

    @property
    def cmsisnn_dir(self):
        """Path to the root of the CMSIS source tree."""
        return str(self.config["cmsisnn.dir"])

    def get_framework_config(self, framework):
        """Expose the CMSIS-NN library and headers to the TVM framework build."""
        assert framework == "tvm", f"Unsupported feature '{self.name}' for framework '{framework}'"
        include_dirs = [
            self.cmsisnn_dir,
            str(Path(self.cmsisnn_dir) / "CMSIS" / "Core" / "Include"),
            str(Path(self.cmsisnn_dir) / "CMSIS" / "NN" / "Include"),
            str(Path(self.cmsisnn_dir) / "CMSIS" / "DSP" / "Include"),
        ]
        return {
            f"{framework}.extra_libs": [self.cmsisnn_lib],
            f"{framework}.extra_incs": include_dirs,
        }

    def add_backend_config(self, backend, config):
        """Append the cmsis-nn BYOC kernel to the backend's extra_kernel list."""
        assert backend in [
            "tvmaot",
            "tvmrt",
            "tvmcg",
        ], f"Unsupported feature '{self.name}' for backend '{backend}'"
        extras = config.get(f"{backend}.extra_kernel", [])
        if "cmsis-nn" not in extras:
            # BUGFIX: previously indexed the list with a string key
            # (extras[f"{backend}.extra_kernel"]), raising TypeError at runtime.
            extras.append("cmsis-nn")
        config[f"{backend}.extra_kernel"] = extras

    def get_required_cache_flags(self):
        """TVM has to be built with the cmsisnn BYOC codegen enabled."""
        return {"tvm.build_dir": ["cmsisnn"]}
# @before_feature("muriscvnn") # TODO: implment something like this
@register_feature("vext")
# class Vext(SetupFeature, TargetFeature, PlatformFeature):
class Vext(SetupFeature, TargetFeature):
    """Enable the RISC-V vector extension (configurable VLEN) for supported targets."""

    DEFAULTS = {
        **FeatureBase.DEFAULTS,
        "vlen": 64,  # TODO; define reasonable default? (Or put defaults in target and overwrite of not None)
    }
    REQUIRED = []

    def __init__(self, config=None):
        super().__init__("vext", config=config)

    @property
    def vlen(self):
        # Vector register length in bits.
        return int(self.config["vlen"])

    def get_target_config(self, target):
        # TODO: enforce llvm toolchain using add_compile_config and CompileFeature?
        assert target in ["spike", "ovpsim"]
        # VLEN must be a power of two.
        assert is_power_of_two(self.vlen)
        return {
            f"{target}.enable_vext": True,
            f"{target}.vlen": self.vlen,
        }

    # It would be great if we could enforce an llvm toolchain here
    # def add_compile_config(self, config):
    #     # TODO: enforce llvm toolchain using add_compile_config and CompileFeature?
    #     if "mlif.toolchain" in config:
    #         assert "mlif.toolchain" == "llvm", "Vext requires LLVM target sw"
    #     else:
    #         config["mlif.toolchain"] = "llvm"
    def get_required_cache_flags(self):
        # Vector-enabled builds of muriscvnn and tflmc are required.
        return {
            "muriscvnn.lib": ["vext"],
            "muriscvnn.inc_dir": ["vext"],
            "tflmc.exe": ["vext"],
        }
@register_feature("debug")
class Debug(SetupFeature, PlatformFeature):
    """Enable debugging ability of target software."""

    def __init__(self, config=None):
        super().__init__("debug", config=config)

    def get_required_cache_flags(self):
        # No extra cache flags in either state. # TODO: remove?
        flags = {}
        return flags if self.enabled else flags

    def get_platform_config(self, platform):
        # Forward the enabled-state as the platform's debug switch.
        return {f"{platform}.debug": self.enabled}
@register_feature("gdbserver")
class GdbServer(TargetFeature):
    """Start debugging session for target software using gdbserver."""

    DEFAULTS = {
        **FeatureBase.DEFAULTS,
        "attach": None,
        "port": None,
    }

    def __init__(self, config=None):
        super().__init__("gdbserver", config=config)

    @property
    def attach(self):
        # TODO: implement get_bool_or_none?
        raw = self.config["attach"]
        return None if raw is None else bool(raw)

    @property
    def port(self):
        raw = self.config["port"]
        return None if raw is None else int(raw)

    def get_target_config(self, target):
        assert target in ["host_x86", "etiss_pulpino"]
        settings = {
            f"{target}.gdbserver_enable": self.enabled,
            f"{target}.gdbserver_attach": self.attach,
            f"{target}.gdbserver_port": self.port,
        }
        # Unset (None) options are stripped so target defaults apply.
        return filter_none(settings)
@register_feature("etissdbg")
class ETISSDebug(SetupFeature, TargetFeature):
    """Debug ETISS internals."""

    def __init__(self, config=None):
        super().__init__("etissdbg", config=config)

    def get_required_cache_flags(self):
        # Debug builds of ETISS and its VP script are only needed when enabled.
        if not self.enabled:
            return {}
        return {"etiss.install_dir": ["debug"], "etissvp.script": ["debug"]}

    def get_target_config(self, target):
        assert target in ["etiss_pulpino"]
        return {"etiss_pulpino.debug_etiss": self.enabled}
@register_feature("trace")
class Trace(TargetFeature):
    """Enable tracing of all memory accesses in ETISS."""

    def __init__(self, config=None):
        # BUGFIX: was initialized with the name "etissdbg" (copy-paste error),
        # which mismatched the registered name and clashed with ETISSDebug.
        super().__init__("trace", config=config)

    def get_target_config(self, target):
        assert target in ["etiss_pulpino"]
        return {"etiss_pulpino.trace_memory": self.enabled}
@register_feature("unpacked_api")
class UnpackedApi(BackendFeature):  # TODO: should this be a feature or config only?
    """Use unpacked interface api for TVMAOT backend to reduce stack usage."""

    def __init__(self, config=None):
        super().__init__("unpacked_api", config=config)

    def get_backend_config(self, backend):
        supported = ["tvmaot"]
        assert backend in supported, f"Unsupported feature '{self.name}' for backend '{backend}'"
        return {f"{backend}.unpacked_api": self.enabled}
@register_feature("packed")
class Packed(FrameworkFeature, FrontendFeature, BackendFeature, SetupFeature, PlatformFeature):
    """Sub-8-bit and sparsity feature for TFLite Micro kernels."""

    def __init__(self, config=None):
        super().__init__("packed", config=config)

    def get_framework_config(self, framework):
        # Framework-level support is not implemented yet.
        raise NotImplementedError

    def get_frontend_config(self, frontend):
        assert frontend in ["tflite"], f"Unsupported feature '{self.name} for frontend '{frontend}''"
        return {f"{frontend}.use_packed_weights": self.enabled}

    def get_backend_config(self, backend):
        # Backend-level support is not implemented yet.
        raise NotImplementedError

    def get_required_cache_flags(self):
        # The tflmc executable has to be built with packed-weight support.
        return {"tflmc.exe": ["packed"]}

    def get_cmake_args(self):
        # NOTE(review): -DDEBUG_ARENA looks copy-pasted from the debug-arena
        # feature; a packing-related define was probably intended — confirm.
        val = "ON" if self.enabled else "OFF"
        return [f"-DDEBUG_ARENA={val}"]
@register_feature("packing")
class Packing(FrontendFeature):
    """Sub-8-bit and sparse weight packing for TFLite Frontend."""

    def __init__(self, config=None):
        super().__init__("packing", config=config)

    def get_frontend_config(self, frontend):
        assert frontend in ["tflite"], f"Unsupported feature '{self.name} for frontend '{frontend}''"
        # Not implemented yet; the return below is intentionally unreachable.
        raise NotImplementedError
        return {f"{frontend}.pack_weights": self.enabled}
@register_feature("memplan")
class Memplan(FrameworkFeature):
    """Custom TVM memory planning feature by (@rafzi)"""

    def __init__(self, config=None):
        super().__init__("memplan", config=config)

    def get_framework_config(self, framework):
        assert framework in ["tvm"], f"Usupported fetaure '{self.name}' for framework '{framework}'"
        # Not implemented yet; the return below is intentionally unreachable.
        raise NotImplementedError
        return {"tvm.memplan_enable": self.enabled}
@register_feature("usmp")
class Usmp(BackendFeature):
    """Unified Static Memory Planning algorithm integrated in TVM"""

    DEFAULTS = {
        **FeatureBase.DEFAULTS,
        "algorithm": "greedy_by_conflicts",  # options: greedy_by_conflicts, greedy_by_size, hill_climb
    }

    def __init__(self, config=None):
        super().__init__("usmp", config=config)

    @property
    def algorithm(self):
        # Name of the USMP algorithm, forwarded to tir.usmp.algorithm.
        return str(self.config["algorithm"])

    def add_backend_config(self, backend, config):
        assert backend in ["tvmaot"], f"Usupported fetaure '{self.name}' for backend '{backend}'"
        # Reuse an existing extra_pass_config if present; the prefixed key wins.
        if f"{backend}.extra_pass_config" in config:
            pass_config = config[f"{backend}.extra_pass_config"]
        elif "extra_pass_config" in config:
            pass_config = config["extra_pass_config"]
        else:
            pass_config = {}
        pass_config["tir.usmp.enable"] = self.enabled
        pass_config["tir.usmp.algorithm"] = self.algorithm
        config.update({f"{backend}.extra_pass_config": pass_config})
# -> enable this via backend
@register_feature("fusetile")
class Fusetile(FrameworkFeature):  # TODO: rename to MOIOPT?
    """WIP TVM feature by (@rafzi)"""

    def __init__(self, config=None):
        super().__init__("fusetile", config=config)

    def get_framework_config(self, framework):
        assert framework in ["tvm"], f"Usupported fetaure '{self.name}' for framework '{framework}'"
        # Not implemented yet; the return below is intentionally unreachable.
        raise NotImplementedError
        return {"tvm.fusetile_enable": self.enabled}
# -> enable this via backend
@register_feature("visualize")
class Visualize(BackendFeature):
    """Visualize TVM relay models."""

    # Bokeh backend has additional python requirements: graphviz, pydot, bokeh >= 2.3.1
    # TODO: add tflite visualizer? (Frontend)
    DEFAULTS = {
        **FeatureBase.DEFAULTS,
        "mode": "cli",  # Alternative: bokeh
    }

    def __init__(self, config=None):
        super().__init__("visualize", config=config)

    @property
    def mode(self):
        """Selected visualization mode ("cli" or "bokeh"), or None if unset."""
        value = self.config["mode"] if "mode" in self.config else None
        if value:
            assert value.lower() in ["cli", "bokeh"]
        return value

    def get_backend_config(self, backend):
        # BUGFIX: previously this *returned* the NotImplementedError class
        # instead of raising it (and the preceding assert referenced the
        # undefined name TVM_BACKENDS, which would NameError first).
        # The feature is still unimplemented, so raise explicitly.
        raise NotImplementedError
        # Intended implementation once supported:
        # assert backend in TVM_BACKENDS, f"Unsupported feature '{self.name}' for backend '{backend}'"
        # return filter_none(
        #     {
        #         f"{backend}.visualize_enable": self.enabled,
        #         f"{backend}.visualize_mode": self.mode,
        #     }
        # )
@register_feature("autotuned")
class Autotuned(BackendFeature):
    """Use existing TVM autotuning logs in backend."""

    # TODO: FronendFeature to collect tuning logs or will we store them somewhere else?
    DEFAULTS = {
        **FeatureBase.DEFAULTS,
        "results_file": None,
    }

    def __init__(self, config=None):
        super().__init__("autotuned", config=config)

    @property
    def results_file(self):
        # Optional path to a tuning-log file; None if unset.
        return self.config.get("results_file")

    def get_backend_config(self, backend):
        assert backend in ["tvmaot", "tvmcg", "tvmrt"]  # TODO: backend in TVM_BACKENDS
        # TODO: error handling her eor on backend?
        options = {
            f"{backend}.use_tuning_results": self.enabled,
            f"{backend}.autotuning_results_file": self.results_file,
        }
        # Unset options are stripped so backend defaults apply.
        return filter_none(options)
@register_feature("autotune")
class Autotune(BackendFeature, RunFeature):
    """Use the TVM autotuner inside the backend to generate tuning logs."""

    DEFAULTS = {
        **FeatureBase.DEFAULTS,
        "results_file": None,
        "append": None,
        "tuner": None,
        "trials": None,  # BUGFIX: key was "trial", but the property reads "trials"
        "early_stopping": None,
        "num_workers": None,
        "max_parallel": None,
        "use_rpc": None,
        "timeout": None,
        # All None to use the defaults defined in the backend instead
    }

    def __init__(self, config=None):
        super().__init__("autotune", config=config)

    def _optional(self, key):
        """Return the configured value for *key*, or None if unset."""
        return self.config[key] if key in self.config else None

    @property
    def results_file(self):
        return self._optional("results_file")

    @property
    def append(self):
        return self._optional("append")

    @property
    def tuner(self):
        return self._optional("tuner")

    @property
    def trials(self):
        return self._optional("trials")

    @property
    def early_stopping(self):
        return self._optional("early_stopping")

    @property
    def num_workers(self):
        return self._optional("num_workers")

    @property
    def max_parallel(self):
        return self._optional("max_parallel")

    @property
    def use_rpc(self):
        return self._optional("use_rpc")

    @property
    def timeout(self):
        return self._optional("timeout")

    def get_backend_config(self, backend):
        assert backend in ["tvmaot", "tvmcg", "tvmrt"]  # TODO: backend in TVM_BACKENDS
        # TODO: figure out a default path automatically
        # None values are dropped so backend defaults remain in effect.
        return filter_none(
            {
                f"{backend}.autotuning_enable": self.enabled,
                # f"{backend}.autotuning_use_tuned": self.enabled, # Should Autotuning ==> Autotuned?
                f"{backend}.autotuning_results_file": self.results_file,
                f"{backend}.autotuning_append": self.append,
                f"{backend}.autotuning_tuner": self.tuner,
                f"{backend}.autotuning_trials": self.trials,
                f"{backend}.autotuning_early_stopping": self.early_stopping,
                f"{backend}.autotuning_num_workers": self.num_workers,
                f"{backend}.autotuning_max_parallel": self.max_parallel,
                f"{backend}.autotuning_use_rpc": self.use_rpc,
                f"{backend}.autotuning_timeout": self.timeout,
            }
        )

    def get_run_config(self):
        return {"run.tune_enabled": self.enabled}
@register_feature("debug_arena")
class DebugArena(BackendFeature, PlatformFeature):
    """Enable verbose printing of arena usage for debugging."""

    def __init__(self, config=None):
        # BUGFIX: this class was registered under "disable_legalize", which
        # clashes with the DisableLegalize feature and mismatches the
        # internal name "debug_arena"; register it as "debug_arena".
        super().__init__("debug_arena", config=config)

    def get_backend_config(self, backend):
        assert backend in [
            "tvmaot",
            "tvmcg",
            "tvmrt",
        ], f"Unsupported feature '{self.name}' for backend '{backend}'"
        # TODO: TFLM also compatible?
        return {f"{backend}.debug_arena": self.enabled}

    # def get_platform_config(self):
    #     return {"mlif.debug_arena": True}
    def get_cmake_args(self):
        val = "ON" if self.enabled else "OFF"
        return [f"-DDEBUG_ARENA={val}"]
@register_feature("validate")
class Validate(FrontendFeature, PlatformFeature):
    """Enable validation of input and output tensors."""

    DEFAULTS = {
        **FeatureBase.DEFAULTS,
        "allow_missing": True,  # Tolerate models without reference in/out data.
    }

    def __init__(self, config=None):
        super().__init__("validate", config=config)

    @property
    def allow_missing(self):
        # Coerce the configured value to a strict bool.
        return bool(self.config["allow_missing"])

    def get_frontend_config(self, frontend):
        # Only the permissive mode (allow_missing=True) is implemented so far.
        if not self.allow_missing:
            raise NotImplementedError
        return {f"{frontend}.use_inout_data": True}

    def get_platform_config(self, platform):
        assert platform == "mlif", f"Unsupported feature '{self.name}' for platform '{platform}'"
        # Validation requires the platform to actually compare reference data.
        return {f"{platform}.ignore_data": False}

    # def get_cmake_args(self):
    #     pass
@register_feature("muriscvnn")
class Muriscvnn(SetupFeature, FrameworkFeature):
    """MuriscvNN CMSIS-NN wrappers for TFLite Micro."""

    REQUIRED = ["muriscvnn.lib", "muriscvnn.inc_dir"]

    def __init__(self, config=None):
        super().__init__("muriscvnn", config=config)

    @property
    def muriscvnn_lib(self):
        """Path to the prebuilt muriscvnn library."""
        return str(self.config["muriscvnn.lib"])

    @property
    def muriscvnn_inc_dir(self):
        """Path to the muriscvnn include directory."""
        return str(self.config["muriscvnn.inc_dir"])

    def add_framework_config(self, framework, config):
        """Select muriscvnn as the (cmsis_nn-compatible) optimized kernel impl.

        Raises RuntimeError if a conflicting optimized_kernel is already chosen.
        """
        assert framework == "tflite", f"Unsupported feature '{self.name}' for framework '{framework}'"
        if config.get(f"{framework}.optimized_kernel") not in [None, "cmsis_nn"]:
            # BUGFIX: the exception was previously constructed but never raised.
            raise RuntimeError(f"There is already a optimized_kernel selected for framework '{framework}'")
        config[f"{framework}.optimized_kernel"] = "cmsis_nn"
        libs = config.get(f"{framework}.optimized_kernel_libs", [])
        libs.append(self.muriscvnn_lib)
        incs = config.get(f"{framework}.optimized_kernel_inc_dirs", [])
        incs.append(self.muriscvnn_inc_dir)
        config[f"{framework}.optimized_kernel_libs"] = libs
        config[f"{framework}.optimized_kernel_inc_dirs"] = incs

    def get_required_cache_flags(self):
        """The tflmc executable has to be built with muriscvnn support."""
        return {"tflmc.exe": ["muriscvnn"]}
@register_feature("cmsisnn")
class Cmsisnn(SetupFeature, FrameworkFeature):
    """CMSIS-NN kernels for TFLite Micro/TVM."""

    REQUIRED = ["cmsisnn.lib", "cmsisnn.dir"]

    def __init__(self, config=None):
        super().__init__("cmsisnn", config=config)

    @property
    def cmsisnn_lib(self):
        """Path to the prebuilt CMSIS-NN library."""
        return str(self.config["cmsisnn.lib"])

    @property
    def cmsisnn_dir(self):
        """Path to the root of the CMSIS source tree."""
        return str(self.config["cmsisnn.dir"])

    def add_framework_config(self, framework, config):
        """Select CMSIS-NN as the optimized kernel implementation for TFLite Micro.

        Raises RuntimeError if a conflicting optimized_kernel is already chosen.
        """
        assert framework == "tflite", f"Unsupported feature '{self.name}' for framework '{framework}'"
        if config.get(f"{framework}.optimized_kernel") not in [None, "cmsis_nn"]:
            # BUGFIX: the exception was previously constructed but never raised.
            raise RuntimeError(f"There is already a optimized_kernel selected for framework '{framework}'")
        config[f"{framework}.optimized_kernel"] = "cmsis_nn"
        libs = config.get(f"{framework}.optimized_kernel_libs", [])
        libs.append(self.cmsisnn_lib)
        incs = config.get(f"{framework}.optimized_kernel_inc_dirs", [])
        # Core/NN/DSP headers are all required by the CMSIS-NN kernels.
        incs.extend(
            [
                self.cmsisnn_dir,
                str(Path(self.cmsisnn_dir) / "CMSIS" / "Core" / "Include"),
                str(Path(self.cmsisnn_dir) / "CMSIS" / "NN" / "Include"),
                str(Path(self.cmsisnn_dir) / "CMSIS" / "DSP" / "Include"),
            ]
        )
        config[f"{framework}.optimized_kernel_libs"] = libs
        config[f"{framework}.optimized_kernel_inc_dirs"] = incs

    def get_required_cache_flags(self):
        """The tflmc executable has to be built with cmsisnn support."""
        return {"tflmc.exe": ["cmsisnn"]}
@register_feature("cmsisnnbyoc")
class CmsisnnByoc(SetupFeature, FrameworkFeature, BackendFeature):
    """CMSIS-NN kernels for TVM using BYOC wrappers."""

    REQUIRED = ["cmsisnn.lib", "cmsisnn.dir"]

    def __init__(self, config=None):
        super().__init__("cmsisnnbyoc", config=config)

    @property
    def cmsisnn_lib(self):
        """Path to the prebuilt CMSIS-NN library."""
        return str(self.config["cmsisnn.lib"])

    @property
    def cmsisnn_dir(self):
        """Path to the root of the CMSIS source tree."""
        return str(self.config["cmsisnn.dir"])

    def get_framework_config(self, framework):
        """Expose the CMSIS-NN library and headers to the TVM framework build."""
        assert framework == "tvm", f"Unsupported feature '{self.name}' for framework '{framework}'"
        include_dirs = [
            self.cmsisnn_dir,
            str(Path(self.cmsisnn_dir) / "CMSIS" / "Core" / "Include"),
            str(Path(self.cmsisnn_dir) / "CMSIS" / "NN" / "Include"),
            str(Path(self.cmsisnn_dir) / "CMSIS" / "DSP" / "Include"),
        ]
        return {
            f"{framework}.extra_libs": [self.cmsisnn_lib],
            f"{framework}.extra_incs": include_dirs,
        }

    def add_backend_config(self, backend, config):
        """Append the cmsis-nn BYOC kernel to the backend's extra_kernel list."""
        assert backend in [
            "tvmaot",
            "tvmrt",
            "tvmcg",
        ], f"Unsupported feature '{self.name}' for backend '{backend}'"
        extras = config.get(f"{backend}.extra_kernel", [])
        if "cmsis-nn" not in extras:
            # BUGFIX: previously indexed the list with a string key
            # (extras[f"{backend}.extra_kernel"]), raising TypeError at runtime.
            extras.append("cmsis-nn")
        config[f"{backend}.extra_kernel"] = extras

    def get_required_cache_flags(self):
        """TVM has to be built with the cmsisnn BYOC codegen enabled."""
        return {"tvm.build_dir": ["cmsisnn"]}
# @before_feature("muriscvnn") # TODO: implment something like this
@register_feature("vext")
# class Vext(SetupFeature, TargetFeature, PlatformFeature):
class Vext(SetupFeature, TargetFeature):
    """Enable the RISC-V vector extension (configurable VLEN) for supported targets."""

    DEFAULTS = {
        **FeatureBase.DEFAULTS,
        "vlen": 64,  # TODO; define reasonable default? (Or put defaults in target and overwrite of not None)
    }
    REQUIRED = []

    def __init__(self, config=None):
        super().__init__("vext", config=config)

    @property
    def vlen(self):
        # Vector register length in bits.
        return int(self.config["vlen"])

    def get_target_config(self, target):
        # TODO: enforce llvm toolchain using add_compile_config and CompileFeature?
        assert target in ["spike", "ovpsim"]
        # VLEN must be a power of two.
        assert is_power_of_two(self.vlen)
        return {
            f"{target}.enable_vext": True,
            f"{target}.vlen": self.vlen,
        }

    # It would be great if we could enforce an llvm toolchain here
    # def add_compile_config(self, config):
    #     # TODO: enforce llvm toolchain using add_compile_config and CompileFeature?
    #     if "mlif.toolchain" in config:
    #         assert "mlif.toolchain" == "llvm", "Vext requires LLVM target sw"
    #     else:
    #         config["mlif.toolchain"] = "llvm"
    def get_required_cache_flags(self):
        # Vector-enabled builds of muriscvnn and tflmc are required.
        return {
            "muriscvnn.lib": ["vext"],
            "muriscvnn.inc_dir": ["vext"],
            "tflmc.exe": ["vext"],
        }
@register_feature("debug")
class Debug(SetupFeature, PlatformFeature):
    """Enable debugging ability of target software."""

    def __init__(self, config=None):
        super().__init__("debug", config=config)

    def get_required_cache_flags(self):
        # Both branches are identical; no cache flags are needed. # TODO: remove?
        return {} if self.enabled else {}

    def get_platform_config(self, platform):
        # Forward the enabled-state as the platform's debug switch.
        return {f"{platform}.debug": self.enabled}
@register_feature("gdbserver")
class GdbServer(TargetFeature):
    """Start debugging session for target software using gdbserver."""

    DEFAULTS = {
        **FeatureBase.DEFAULTS,
        "attach": None,  # None -> use the target's default
        "port": None,  # None -> use the target's default
    }

    def __init__(self, config=None):
        super().__init__("gdbserver", config=config)

    @property
    def attach(self):
        # TODO: implement get_bool_or_none?
        return bool(self.config["attach"]) if self.config["attach"] is not None else None

    @property
    def port(self):
        # TCP port for the gdbserver, or None if unset.
        return int(self.config["port"]) if self.config["port"] is not None else None

    def get_target_config(self, target):
        assert target in ["host_x86", "etiss_pulpino"]
        # Unset (None) options are stripped so target defaults apply.
        return filter_none(
            {
                f"{target}.gdbserver_enable": self.enabled,
                f"{target}.gdbserver_attach": self.attach,
                f"{target}.gdbserver_port": self.port,
            }
        )
@register_feature("etissdbg")
class ETISSDebug(SetupFeature, TargetFeature):
    """Debug ETISS internals."""

    def __init__(self, config=None):
        super().__init__("etissdbg", config=config)

    def get_required_cache_flags(self):
        # Debug builds of ETISS and its VP script are only needed when enabled.
        return {"etiss.install_dir": ["debug"], "etissvp.script": ["debug"]} if self.enabled else {}

    def get_target_config(self, target):
        assert target in ["etiss_pulpino"]
        return {"etiss_pulpino.debug_etiss": self.enabled}
@register_feature("trace")
class Trace(TargetFeature):
    """Enable tracing of all memory accesses in ETISS."""

    def __init__(self, config=None):
        # BUGFIX: was initialized with the name "etissdbg" (copy-paste error),
        # which mismatched the registered name and clashed with ETISSDebug.
        super().__init__("trace", config=config)

    def get_target_config(self, target):
        assert target in ["etiss_pulpino"]
        return {"etiss_pulpino.trace_memory": self.enabled}
@register_feature("unpacked_api")
class UnpackedApi(BackendFeature):  # TODO: should this be a feature or config only?
    """Use unpacked interface api for TVMAOT backend to reduce stack usage."""

    def __init__(self, config=None):
        super().__init__("unpacked_api", config=config)

    def get_backend_config(self, backend):
        # Only the AoT backend supports the unpacked calling convention.
        assert backend in ["tvmaot"], f"Unsupported feature '{self.name}' for backend '{backend}'"
        return {f"{backend}.unpacked_api": self.enabled}
@register_feature("packed")
class Packed(FrameworkFeature, FrontendFeature, BackendFeature, SetupFeature, PlatformFeature):
    """Sub-8-bit and sparsity feature for TFLite Micro kernels."""

    def __init__(self, config=None):
        super().__init__("packed", config=config)

    def get_framework_config(self, framework):
        # Framework-level support is not implemented yet.
        raise NotImplementedError

    def get_frontend_config(self, frontend):
        assert frontend in ["tflite"], f"Unsupported feature '{self.name} for frontend '{frontend}''"
        return {f"{frontend}.use_packed_weights": self.enabled}

    def get_backend_config(self, backend):
        # Backend-level support is not implemented yet.
        raise NotImplementedError

    def get_required_cache_flags(self):
        # The tflmc executable has to be built with packed-weight support.
        return {"tflmc.exe": ["packed"]}

    def get_cmake_args(self):
        # NOTE(review): -DDEBUG_ARENA looks copy-pasted from the debug-arena
        # feature; a packing-related define was probably intended — confirm.
        val = "ON" if self.enabled else "OFF"
        return [f"-DDEBUG_ARENA={val}"]
@register_feature("packing")
class Packing(FrontendFeature):
    """Sub-8-bit and sparse weight packing for TFLite Frontend."""

    def __init__(self, config=None):
        super().__init__("packing", config=config)

    def get_frontend_config(self, frontend):
        assert frontend in ["tflite"], f"Unsupported feature '{self.name} for frontend '{frontend}''"
        # Not implemented yet; the return below is intentionally unreachable.
        raise NotImplementedError
        return {f"{frontend}.pack_weights": self.enabled}
@register_feature("fallback")
class Fallback(FrameworkFeature, PlatformFeature):
    """(Unimplemented) TFLite Fallback for unsupported and custom operators in TVM."""

    DEFAULTS = {
        **FeatureBase.DEFAULTS,
        "config_file": None,
    }

    def __init__(self, config=None):
        super().__init__("fallback", config=config)

    @property
    def config_file(self):
        """Optional path to the fallback configuration file, or None if unset."""
        # BUGFIX: the default value None is present in self.config, so the old
        # membership test stringified it to the literal "None".
        value = self.config.get("config_file")
        return str(value) if value is not None else None

    def get_framework_config(self, framework):
        assert framework in ["tvm"], f"Usupported fetaure '{self.name}' for framework '{framework}'"
        # Not implemented yet; the return below is intentionally unreachable.
        raise NotImplementedError
        return filter_none(
            {
                f"{framework}.fallback_enable": self.enabled,
                f"{framework}.fallback_config_file": self.config_file,
            }
        )

    # -> hard to model..., preprocess for tflmc?
# -> hard to model..., preprocess for tflmc?
@register_feature("memplan")
class Memplan(FrameworkFeature):
    """Custom TVM memory planning feature by (@rafzi)"""

    def __init__(self, config=None):
        super().__init__("memplan", config=config)

    def get_framework_config(self, framework):
        assert framework in ["tvm"], f"Usupported fetaure '{self.name}' for framework '{framework}'"
        # Forward the enabled-state to the TVM framework config.
        return {"tvm.memplan_enable": self.enabled}
@register_feature("usmp")
class Usmp(BackendFeature):
    """Unified Static Memory Planning algorithm integrated in TVM"""

    DEFAULTS = {
        **FeatureBase.DEFAULTS,
        "algorithm": "greedy_by_conflicts",  # options: greedy_by_conflicts, greedy_by_size, hill_climb
    }

    def __init__(self, config=None):
        super().__init__("usmp", config=config)

    @property
    def algorithm(self):
        # Name of the USMP algorithm, forwarded to tir.usmp.algorithm.
        return str(self.config["algorithm"])

    def add_backend_config(self, backend, config):
        assert backend in ["tvmaot"], f"Usupported fetaure '{self.name}' for backend '{backend}'"
        # Reuse an existing extra_pass_config if present; the prefixed key wins.
        if f"{backend}.extra_pass_config" in config:
            tmp = config[f"{backend}.extra_pass_config"]
        elif "extra_pass_config" in config:
            tmp = config["extra_pass_config"]
        else:
            tmp = {}
        tmp["tir.usmp.enable"] = self.enabled
        tmp["tir.usmp.algorithm"] = self.algorithm
        config.update({f"{backend}.extra_pass_config": tmp})
# -> enable this via backend
@register_feature("fusetile")
class Fusetile(FrameworkFeature):
    """WIP TVM feature by (@rafzi)"""

    def __init__(self, config=None):
        super().__init__("fusetile", config=config)

    def get_framework_config(self, framework):
        assert framework in ["tvm"], f"Usupported fetaure '{self.name}' for framework '{framework}'"
        # Forward the enabled-state to the TVM framework config.
        return {"tvm.fusetile_enable": self.enabled}
# -> enable this via backend
@register_feature("visualize")
class Visualize(BackendFeature):
    """Visualize TVM relay models."""

    # Bokeh backend has additional python requirements: graphviz, pydot, bokeh >= 2.3.1
    # TODO: add tflite visualizer? (Frontend)
    DEFAULTS = {
        **FeatureBase.DEFAULTS,
        "mode": "cli",  # Alternative: bokeh
    }

    def __init__(self, config=None):
        super().__init__("visualize", config=config)

    @property
    def mode(self):
        """Selected visualization mode ("cli" or "bokeh"), or None if unset."""
        value = self.config["mode"] if "mode" in self.config else None
        if value:
            assert value.lower() in ["cli", "bokeh"]
        return value

    def get_backend_config(self, backend):
        # BUGFIX: previously this *returned* the NotImplementedError class
        # instead of raising it (and the preceding assert referenced the
        # undefined name TVM_BACKENDS, which would NameError first).
        # The feature is still unimplemented, so raise explicitly.
        raise NotImplementedError
        # Intended implementation once supported:
        # assert backend in TVM_BACKENDS, f"Unsupported feature '{self.name}' for backend '{backend}'"
        # return filter_none(
        #     {
        #         f"{backend}.visualize_enable": self.enabled,
        #         f"{backend}.visualize_mode": self.mode,
        #     }
        # )
@register_feature("autotuned")
class Autotuned(BackendFeature):
    """Use existing TVM autotuning logs in backend."""

    # TODO: FronendFeature to collect tuning logs or will we store them somewhere else?
    DEFAULTS = {
        **FeatureBase.DEFAULTS,
        "results_file": None,  # None -> backend default
    }

    def __init__(self, config=None):
        super().__init__("autotuned", config=config)

    @property
    def results_file(self):
        # Optional path to a tuning-log file; None if unset.
        return self.config["results_file"] if "results_file" in self.config else None

    def get_backend_config(self, backend):
        assert backend in ["tvmaot", "tvmcg", "tvmrt"]  # TODO: backend in TVM_BACKENDS
        # TODO: error handling her eor on backend?
        # Unset options are stripped so backend defaults apply.
        return filter_none(
            {
                f"{backend}.use_tuning_results": self.enabled,
                f"{backend}.autotuning_results_file": self.results_file,
            }
        )
@register_feature("autotune")
class Autotune(BackendFeature, RunFeature):
    """Use the TVM autotuner inside the backend to generate tuning logs."""

    DEFAULTS = {
        **FeatureBase.DEFAULTS,
        "results_file": None,
        "append": None,
        "tuner": None,
        "trials": None,  # BUGFIX: key was "trial", but the property reads "trials"
        "early_stopping": None,
        "num_workers": None,
        "max_parallel": None,
        "use_rpc": None,
        "timeout": None,
        # All None to use the defaults defined in the backend instead
    }

    def __init__(self, config=None):
        super().__init__("autotune", config=config)

    def _optional(self, key):
        """Return the configured value for *key*, or None if unset."""
        return self.config[key] if key in self.config else None

    @property
    def results_file(self):
        return self._optional("results_file")

    @property
    def append(self):
        return self._optional("append")

    @property
    def tuner(self):
        return self._optional("tuner")

    @property
    def trials(self):
        return self._optional("trials")

    @property
    def early_stopping(self):
        return self._optional("early_stopping")

    @property
    def num_workers(self):
        return self._optional("num_workers")

    @property
    def max_parallel(self):
        return self._optional("max_parallel")

    @property
    def use_rpc(self):
        return self._optional("use_rpc")

    @property
    def timeout(self):
        return self._optional("timeout")

    def get_backend_config(self, backend):
        assert backend in ["tvmaot", "tvmcg", "tvmrt"]  # TODO: backend in TVM_BACKENDS
        # TODO: figure out a default path automatically
        # None values are dropped so backend defaults remain in effect.
        return filter_none(
            {
                f"{backend}.autotuning_enable": self.enabled,
                # f"{backend}.autotuning_use_tuned": self.enabled, # Should Autotuning ==> Autotuned?
                f"{backend}.autotuning_results_file": self.results_file,
                f"{backend}.autotuning_append": self.append,
                f"{backend}.autotuning_tuner": self.tuner,
                f"{backend}.autotuning_trials": self.trials,
                f"{backend}.autotuning_early_stopping": self.early_stopping,
                f"{backend}.autotuning_num_workers": self.num_workers,
                f"{backend}.autotuning_max_parallel": self.max_parallel,
                f"{backend}.autotuning_use_rpc": self.use_rpc,
                f"{backend}.autotuning_timeout": self.timeout,
            }
        )

    def get_run_config(self):
        return {"run.tune_enabled": self.enabled}
@register_feature("disable_legalize")
class DisableLegalize(BackendFeature, SetupFeature):
    """Enable transformation to reduces sizes of intermediate buffers by skipping legalization passes."""

    REQUIRED = ["tvm_extensions.wrapper"]

    def __init__(self, config=None):
        super().__init__("disable_legalize", config=config)

    @property
    def tvm_extensions_wrapper(self):
        """Path to the tvmc wrapper script shipped with the tvm_extensions patch."""
        return self.config["tvm_extensions.wrapper"]

    def add_backend_config(self, backend, config):
        """Inject the --disable-legalize flag and the custom tvmc wrapper script."""
        assert backend in [
            "tvmaot",
            "tvmcg",
            "tvmrt",
        ], f"Unsupported feature '{self.name}' for backend '{backend}'"
        if f"{backend}.tvmc_extra_args" in config:
            config[f"{backend}.tvmc_extra_args"].append("--disable-legalize")
        else:
            config[f"{backend}.tvmc_extra_args"] = ["--disable-legalize"]
        if f"{backend}.tvmc_custom_script" in config:
            # BUGFIX: the comparison referenced the undefined attribute
            # `self.tvm_extensions_src` (AttributeError when triggered);
            # compare against the wrapper path instead.
            assert config[f"{backend}.tvmc_custom_script"] is None or str(
                config[f"{backend}.tvmc_custom_script"]
            ) == str(
                self.tvm_extensions_wrapper
            ), f"{backend}.tvmc_custom_script is already set. Can't enable feature: {self.name}"
        config[f"{backend}.tvmc_custom_script"] = self.tvm_extensions_wrapper

    def get_required_cache_flags(self):
        """TVM's PYTHONPATH needs the patched (extensions) variant."""
        return {"tvm.pythonpath": ["patch"]}
| 33.0784
| 120
| 0.64049
| 4,775
| 41,348
| 5.343246
| 0.084188
| 0.045465
| 0.017245
| 0.026652
| 0.910559
| 0.906522
| 0.897037
| 0.895861
| 0.894528
| 0.891589
| 0
| 0.000823
| 0.236239
| 41,348
| 1,249
| 121
| 33.104884
| 0.807093
| 0.144263
| 0
| 0.869203
| 0
| 0
| 0.23259
| 0.083899
| 0
| 0
| 0
| 0.000801
| 0.054697
| 1
| 0.191439
| false
| 0.011891
| 0.003567
| 0.067776
| 0.401903
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9c3d231a0e07bc8b89b413da5771856f7cb54acf
| 66,591
|
py
|
Python
|
test/integration/component/test_accounts.py
|
lafferty/cshv3
|
ee0ff7ac240bd24e19db6bd3fb9869dd087442ba
|
[
"Apache-2.0"
] | 2
|
2015-05-19T05:04:30.000Z
|
2016-09-07T00:33:17.000Z
|
test/integration/component/test_accounts.py
|
lafferty/cshv3
|
ee0ff7ac240bd24e19db6bd3fb9869dd087442ba
|
[
"Apache-2.0"
] | null | null | null |
test/integration/component/test_accounts.py
|
lafferty/cshv3
|
ee0ff7ac240bd24e19db6bd3fb9869dd087442ba
|
[
"Apache-2.0"
] | 2
|
2017-07-07T14:49:03.000Z
|
2018-07-31T06:38:42.000Z
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
""" P1 tests for Account
"""
#Import Local Modules
from marvin.cloudstackTestCase import *
from marvin.cloudstackAPI import *
from marvin.integration.lib.utils import *
from marvin.integration.lib.base import *
from marvin.integration.lib.common import *
from marvin import remoteSSHClient
from nose.plugins.attrib import attr
from marvin.cloudstackException import cloudstackAPIException
class Services:
    """Static test-data definitions for the account test suites.

    ``self.services`` bundles every configuration section (domain,
    account, user, offerings, VM, template, NAT rule) that the test
    classes in this module feed into the Marvin API wrappers.
    """

    def __init__(self):
        # Build each section separately, then assemble the final map so
        # individual sections are easy to locate and tweak.
        domain_cfg = {
            "name": "Domain",
        }
        account_cfg = {
            "email": "test@test.com",
            "firstname": "Test",
            "lastname": "User",
            "username": "test",
            # Random characters are appended for unique username
            "password": "fr3sca",
        }
        user_cfg = {
            "email": "user@test.com",
            "firstname": "User",
            "lastname": "User",
            "username": "User",
            # Random characters are appended for unique username
            "password": "fr3sca",
        }
        offering_cfg = {
            "name": "Tiny Instance",
            "displaytext": "Tiny Instance",
            "cpunumber": 1,
            "cpuspeed": 100,    # in MHz
            "memory": 128,      # in MBs
        }
        vm_cfg = {
            "displayname": "Test VM",
            "username": "root",
            "password": "password",
            "ssh_port": 22,
            # Hypervisor type should be same as hypervisor type of cluster
            "hypervisor": 'XenServer',
            "privateport": 22,
            "publicport": 22,
            "protocol": 'TCP',
        }
        template_cfg = {
            "displaytext": "Public Template",
            "name": "Public template",
            "url": "http://download.cloud.com/releases/2.0.0/UbuntuServer-10-04-64bit.vhd.bz2",
            "hypervisor": 'XenServer',
            "format": 'VHD',
            "isfeatured": True,
            "ispublic": True,
            "isextractable": True,
            "ostype": 'CentOS 5.3 (64-bit)',
        }
        natrule_cfg = {
            "publicport": 22,
            "privateport": 22,
            "protocol": 'TCP',
        }
        self.services = {
            "domain": domain_cfg,
            "account": account_cfg,
            "user": user_cfg,
            "service_offering": offering_cfg,
            "virtual_machine": vm_cfg,
            "template": template_cfg,
            "natrule": natrule_cfg,
            "ostype": 'CentOS 5.3 (64-bit)',    # Cent OS 5.3 (64 bit)
            "sleep": 60,
            "timeout": 10,
        }
class TestAccounts(cloudstackTestCase):
    """P1 tests: create an account and then a user inside that account."""

    @classmethod
    def setUpClass(cls):
        # Class-wide fixtures shared by every test: API client, zone,
        # template lookup and a service offering.
        cls.api_client = super(
            TestAccounts,
            cls
        ).getClsTestClient().getApiClient()
        cls.services = Services().services
        # Get Zone, Domain and templates
        cls.zone = get_zone(cls.api_client, cls.services)
        cls.services['mode'] = cls.zone.networktype
        cls.template = get_template(
            cls.api_client,
            cls.zone.id,
            cls.services["ostype"]
        )
        cls.services["virtual_machine"]["zoneid"] = cls.zone.id
        cls.services["virtual_machine"]["template"] = cls.template.id
        cls.service_offering = ServiceOffering.create(
            cls.api_client,
            cls.services["service_offering"]
        )
        # Resources removed once the whole class has run
        cls._cleanup = [cls.service_offering]
        return

    @classmethod
    def tearDownClass(cls):
        try:
            # Cleanup resources used
            cleanup_resources(cls.api_client, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    def setUp(self):
        # Fresh API/DB clients and a per-test cleanup list
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.cleanup = []
        return

    def tearDown(self):
        try:
            # Clean up, terminate the created accounts, domains etc
            cleanup_resources(self.apiclient, self.cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    @attr(tags=["advanced", "basic", "eip", "advancedns", "sg"])
    def test_01_create_account(self):
        """Test Create Account and user for that account
        """
        # Validate the following
        # 1. Create an Account. Verify the account is created.
        # 2. Create User associated with that account. Verify the created user

        # Create an account
        account = Account.create(
            self.apiclient,
            self.services["account"]
        )
        self.debug("Created account: %s" % account.name)
        self.cleanup.append(account)
        list_accounts_response = list_accounts(
            self.apiclient,
            id=account.id
        )
        self.assertEqual(
            isinstance(list_accounts_response, list),
            True,
            "Check list accounts for valid data"
        )
        self.assertNotEqual(
            len(list_accounts_response),
            0,
            "Check List Account response"
        )
        # The listed account must mirror what Account.create returned
        account_response = list_accounts_response[0]
        self.assertEqual(
            account.accounttype,
            account_response.accounttype,
            "Check Account Type of Created account"
        )
        self.assertEqual(
            account.name,
            account_response.name,
            "Check Account Name of Created account"
        )
        # Create an User associated with account
        user = User.create(
            self.apiclient,
            self.services["user"],
            account=account.name,
            domainid=account.domainid
        )
        self.debug("Created user: %s" % user.id)
        list_users_response = list_users(
            self.apiclient,
            id=user.id
        )
        self.assertEqual(
            isinstance(list_users_response, list),
            True,
            "Check list users for valid data"
        )
        self.assertNotEqual(
            len(list_users_response),
            0,
            "Check List User response"
        )
        # The listed user must mirror what User.create returned
        user_response = list_users_response[0]
        self.assertEqual(
            user.username,
            user_response.username,
            "Check username of Created user"
        )
        self.assertEqual(
            user.state,
            user_response.state,
            "Check state of created user"
        )
        return
class TestNonRootAdminsPrivileges(cloudstackTestCase):
    """Verify that listing accounts scoped to a non-ROOT domain returns
    only that domain's accounts."""

    @classmethod
    def setUpClass(cls):
        cls.api_client = super(
            TestNonRootAdminsPrivileges,
            cls
        ).getClsTestClient().getApiClient()
        cls.services = Services().services
        # Get Zone settings
        cls.zone = get_zone(cls.api_client, cls.services)
        cls.services['mode'] = cls.zone.networktype
        # Create an account, domain etc
        cls.domain = Domain.create(
            cls.api_client,
            cls.services["domain"],
        )
        # Domain-admin account inside the freshly created domain
        cls.account = Account.create(
            cls.api_client,
            cls.services["account"],
            admin=True,
            domainid=cls.domain.id
        )
        cls._cleanup = [
            cls.account,
            cls.domain
        ]
        return

    @classmethod
    def tearDownClass(cls):
        try:
            # Cleanup resources used
            cleanup_resources(cls.api_client, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    def setUp(self):
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.cleanup = []
        return

    def tearDown(self):
        try:
            # Clean up, terminate the created accounts
            cleanup_resources(self.apiclient, self.cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    @attr(tags=["advanced", "basic", "eip", "advancedns", "sg"])
    def test_01_non_root_admin_Privileges(self):
        """Test to verify Non Root admin privileges"""
        # Validate the following
        # 1. Create few accounts/users in ROOT domain
        # 2. Verify listAccounts API gives only accounts associated with new
        #    domain.

        # Create accounts for ROOT domain (no domainid passed)
        account_1 = Account.create(
            self.apiclient,
            self.services["account"]
        )
        self.debug("Created account: %s" % account_1.name)
        self.cleanup.append(account_1)
        account_2 = Account.create(
            self.apiclient,
            self.services["account"]
        )
        self.debug("Created account: %s" % account_2.name)
        self.cleanup.append(account_2)
        # Listing scoped to the test domain must exclude the ROOT accounts
        accounts_response = list_accounts(
            self.apiclient,
            domainid=self.domain.id
        )
        self.assertEqual(
            isinstance(accounts_response, list),
            True,
            "Check list accounts response for valid data"
        )
        # Exactly one account expected: the one made in setUpClass
        self.assertEqual(
            len(accounts_response),
            1,
            "Check List accounts response"
        )
        # Verify only account associated with domain is listed
        for account in accounts_response:
            self.assertEqual(
                account.domainid,
                self.domain.id,
                "Check domain ID of account"
            )
        return
class TestServiceOfferingSiblings(cloudstackTestCase):
    """A domain-scoped service offering must be visible in its own
    domain and invisible to a sibling domain."""

    @classmethod
    def setUpClass(cls):
        cls.api_client = super(
            TestServiceOfferingSiblings,
            cls
        ).getClsTestClient().getApiClient()
        cls.services = Services().services
        # Create Domains, accounts etc
        cls.domain_1 = Domain.create(
            cls.api_client,
            cls.services["domain"]
        )
        # Sibling domain (no parent relationship to domain_1)
        cls.domain_2 = Domain.create(
            cls.api_client,
            cls.services["domain"]
        )
        # Offering restricted to domain_1
        cls.service_offering = ServiceOffering.create(
            cls.api_client,
            cls.services["service_offering"],
            domainid=cls.domain_1.id
        )
        # Create account for doamin_1
        cls.account_1 = Account.create(
            cls.api_client,
            cls.services["account"],
            admin=True,
            domainid=cls.domain_1.id
        )
        # Create an account for domain_2
        cls.account_2 = Account.create(
            cls.api_client,
            cls.services["account"],
            admin=True,
            domainid=cls.domain_2.id
        )
        # Accounts must be removed before the offering and domains
        cls._cleanup = [
            cls.account_1,
            cls.account_2,
            cls.service_offering,
            cls.domain_1,
            cls.domain_2,
        ]
        return

    @classmethod
    def tearDownClass(cls):
        try:
            # Cleanup resources used
            cleanup_resources(cls.api_client, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    def setUp(self):
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.cleanup = []
        return

    def tearDown(self):
        try:
            # Clean up, terminate the created domains, accounts
            cleanup_resources(self.apiclient, self.cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    @attr(tags=["advanced", "basic", "eip", "advancedns", "sg"])
    def test_01_service_offering_siblings(self):
        """Test to verify service offerings at same level in hierarchy"""
        # Validate the following
        # 1. Verify service offering is visible for domain_1
        # 2. Verify service offering is not visible for domain_2
        service_offerings = list_service_offering(
            self.apiclient,
            domainid=self.domain_1.id
        )
        self.assertEqual(
            isinstance(service_offerings, list),
            True,
            "Check if valid list service offerings response"
        )
        self.assertNotEqual(
            len(service_offerings),
            0,
            "Check List Service Offerings response"
        )
        # Every listed offering must be the one created for domain_1
        for service_offering in service_offerings:
            self.debug("Validating service offering: %s" % service_offering.id)
            self.assertEqual(
                service_offering.id,
                self.service_offering.id,
                "Check Service offering ID for domain" + str(self.domain_1.name)
            )
        # Verify private service offering is not visible to other domain
        # (Marvin list helpers return None for an empty result set)
        service_offerings = list_service_offering(
            self.apiclient,
            domainid=self.domain_2.id
        )
        self.assertEqual(
            service_offerings,
            None,
            "Check List Service Offerings response for other domain"
        )
        return
class TestServiceOfferingHierarchy(cloudstackTestCase):
    """Visibility of a domain-scoped service offering when the second
    domain is a CHILD of the first (parent/child hierarchy)."""

    @classmethod
    def setUpClass(cls):
        cls.api_client = super(
            TestServiceOfferingHierarchy,
            cls
        ).getClsTestClient().getApiClient()
        cls.services = Services().services
        # Create domain, service offerings etc
        cls.domain_1 = Domain.create(
            cls.api_client,
            cls.services["domain"]
        )
        # domain_2 is a sub-domain of domain_1
        cls.domain_2 = Domain.create(
            cls.api_client,
            cls.services["domain"],
            parentdomainid=cls.domain_1.id
        )
        # Offering restricted to the parent domain
        cls.service_offering = ServiceOffering.create(
            cls.api_client,
            cls.services["service_offering"],
            domainid=cls.domain_1.id
        )
        # Create account for doamin_1
        cls.account_1 = Account.create(
            cls.api_client,
            cls.services["account"],
            admin=True,
            domainid=cls.domain_1.id
        )
        # Create an account for domain_2
        cls.account_2 = Account.create(
            cls.api_client,
            cls.services["account"],
            admin=True,
            domainid=cls.domain_2.id
        )
        # Child-domain resources cleaned up before the parent's
        cls._cleanup = [
            cls.account_2,
            cls.domain_2,
            cls.service_offering,
            cls.account_1,
            cls.domain_1,
        ]
        return

    @classmethod
    def tearDownClass(cls):
        try:
            # Cleanup resources used
            cleanup_resources(cls.api_client, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    def setUp(self):
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.cleanup = []
        return

    def tearDown(self):
        try:
            # Clean up, terminate the created instance, volumes and snapshots
            cleanup_resources(self.apiclient, self.cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    @attr(tags=["advanced", "basic", "eip", "advancedns", "sg"])
    def test_01_service_offering_hierarchy(self):
        """Test to verify service offerings at same level in hierarchy"""
        # Validate the following
        # 1. Verify service offering is visible for domain_1
        # 2. Verify service offering is also visible for domain_2
        # NOTE(review): step 2 above contradicts the assertion below,
        # which expects an EMPTY listing (None) for domain_2 — confirm
        # which behavior the product actually intends.
        service_offerings = list_service_offering(
            self.apiclient,
            domainid=self.domain_1.id
        )
        self.assertEqual(
            isinstance(service_offerings, list),
            True,
            "Check List Service Offerings for a valid response"
        )
        self.assertNotEqual(
            len(service_offerings),
            0,
            "Check List Service Offerings response"
        )
        for service_offering in service_offerings:
            self.assertEqual(
                service_offering.id,
                self.service_offering.id,
                "Check Service offering ID for domain" + str(self.domain_1.name)
            )
        # Verify private service offering is not visible to other domain
        service_offerings = list_service_offering(
            self.apiclient,
            domainid=self.domain_2.id
        )
        self.assertEqual(
            service_offerings,
            None,
            "Check List Service Offerings for a valid response"
        )
        return
class TestTemplateHierarchy(cloudstackTestCase):
    """A template registered in a parent domain must be visible both in
    that domain and in its child domain."""

    @classmethod
    def setUpClass(cls):
        cls.api_client = super(
            TestTemplateHierarchy,
            cls).getClsTestClient().getApiClient()
        cls.services = Services().services
        # Get Zone settings
        cls.zone = get_zone(cls.api_client, cls.services)
        cls.services['mode'] = cls.zone.networktype
        cls.services["template"]["zoneid"] = cls.zone.id
        # Create domains, accounts and template
        cls.domain_1 = Domain.create(
            cls.api_client,
            cls.services["domain"]
        )
        # domain_2 is a sub-domain of domain_1
        cls.domain_2 = Domain.create(
            cls.api_client,
            cls.services["domain"],
            parentdomainid=cls.domain_1.id
        )
        # Create account for doamin_1
        cls.account_1 = Account.create(
            cls.api_client,
            cls.services["account"],
            admin=True,
            domainid=cls.domain_1.id
        )
        # Create an account for domain_2
        cls.account_2 = Account.create(
            cls.api_client,
            cls.services["account"],
            admin=True,
            domainid=cls.domain_2.id
        )
        # Register the (public) template under account_1 / domain_1
        cls.template = Template.register(
            cls.api_client,
            cls.services["template"],
            account=cls.account_1.name,
            domainid=cls.domain_1.id
        )
        # Wait for template to download
        cls.template.download(cls.api_client)
        # Wait for template status to be changed across
        time.sleep(60)
        cls._cleanup = [
            cls.account_2,
            cls.domain_2,
            cls.template,
            cls.account_1,
            cls.domain_1,
        ]
        return

    @classmethod
    def tearDownClass(cls):
        try:
            # Cleanup resources used
            cleanup_resources(cls.api_client, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    def setUp(self):
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.cleanup = []
        return

    def tearDown(self):
        try:
            # Clean up, terminate the created instance, volumes and snapshots
            cleanup_resources(self.apiclient, self.cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    @attr(tags=["advanced", "basic", "eip", "advancedns", "sg"])
    def test_01_template_hierarchy(self):
        """Test to verify template at same level in hierarchy"""
        # Validate the following
        # 1. Verify template is visible for domain_1
        # 2. Verify template is also visible for domain_2
        # Sleep to ensure that template state is reflected across
        templates = list_templates(
            self.apiclient,
            templatefilter='self',
            account=self.account_1.name,
            domainid=self.domain_1.id
        )
        self.assertEqual(
            isinstance(templates, list),
            True,
            "Check List templates for a valid response"
        )
        self.assertNotEqual(
            len(templates),
            0,
            "Check List Template response"
        )
        for template in templates:
            self.assertEqual(
                template.id,
                self.template.id,
                "Check Template ID for domain" + str(self.domain_1.name)
            )
        # Verify private service offering is not visible to other domain
        # (here the template IS expected to be listed from the child domain)
        templates = list_templates(
            self.apiclient,
            id=self.template.id,
            templatefilter='all',
            account=self.account_2.name,
            domainid=self.domain_2.id
        )
        self.assertEqual(
            isinstance(templates, list),
            True,
            "Check List templates for a valid response"
        )
        self.assertNotEqual(
            len(templates),
            0,
            "Check List Service Offerings response"
        )
        for template in templates:
            self.assertEqual(
                template.id,
                self.template.id,
                "Check Template ID for domain" + str(self.domain_2.name)
            )
        return
class TestAddVmToSubDomain(cloudstackTestCase):
    """A sub-domain account must be able to deploy a VM using a
    service offering scoped to the parent domain."""

    @classmethod
    def setUpClass(cls):
        cls.api_client = super(
            TestAddVmToSubDomain,
            cls
        ).getClsTestClient().getApiClient()
        cls.services = Services().services
        # Setup working Environment- Create domain, zone, pod cluster etc.
        cls.domain = get_domain(
            cls.api_client,
            cls.services
        )
        cls.zone = get_zone(
            cls.api_client,
            cls.services,
        )
        cls.services['mode'] = cls.zone.networktype
        # Sub-domain under the existing (parent) domain
        cls.sub_domain = Domain.create(
            cls.api_client,
            cls.services["domain"],
            parentdomainid=cls.domain.id
        )
        # Create account for doamin_1
        cls.account_1 = Account.create(
            cls.api_client,
            cls.services["account"],
            admin=True,
            domainid=cls.domain.id
        )
        # Create an account for domain_2
        cls.account_2 = Account.create(
            cls.api_client,
            cls.services["account"],
            admin=True,
            domainid=cls.sub_domain.id
        )
        # Offering scoped to the parent domain; used by both accounts
        cls.service_offering = ServiceOffering.create(
            cls.api_client,
            cls.services["service_offering"],
            domainid=cls.domain.id
        )
        cls.template = get_template(
            cls.api_client,
            cls.zone.id,
            cls.services["ostype"]
        )
        cls.services["virtual_machine"]["zoneid"] = cls.zone.id
        # One VM per account, both on the parent-domain offering
        cls.vm_1 = VirtualMachine.create(
            cls.api_client,
            cls.services["virtual_machine"],
            templateid=cls.template.id,
            accountid=cls.account_1.name,
            domainid=cls.account_1.domainid,
            serviceofferingid=cls.service_offering.id
        )
        cls.vm_2 = VirtualMachine.create(
            cls.api_client,
            cls.services["virtual_machine"],
            templateid=cls.template.id,
            accountid=cls.account_2.name,
            domainid=cls.account_2.domainid,
            serviceofferingid=cls.service_offering.id
        )
        cls._cleanup = [
            cls.account_2,
            cls.account_1,
            cls.sub_domain,
            cls.service_offering
        ]
        return

    @classmethod
    def tearDownClass(cls):
        try:
            # Clean up, terminate the created resources
            cleanup_resources(cls.api_client, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    def setUp(self):
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.cleanup = []
        return

    def tearDown(self):
        try:
            # Clean up, terminate the created resources
            cleanup_resources(self.apiclient, self.cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    @attr(tags=["advanced", "basic", "eip", "advancedns", "sg"])
    def test_01_add_vm_to_subdomain(self):
        """ Test Sub domain allowed to launch VM when a Domain level zone is created"""
        # Validate the following
        # 1. Verify VM created by Account_1 is in Running state
        # 2. Verify VM created by Account_2 is in Running state
        vm_response = list_virtual_machines(
            self.apiclient,
            id=self.vm_1.id
        )
        self.assertEqual(
            isinstance(vm_response, list),
            True,
            "Check List VM for a valid response"
        )
        self.assertNotEqual(
            len(vm_response),
            0,
            "Check List Template response"
        )
        for vm in vm_response:
            self.debug("VM ID: %s and state: %s" % (vm.id, vm.state))
            self.assertEqual(
                vm.state,
                'Running',
                "Check State of Virtual machine"
            )
        # Same check for the sub-domain account's VM
        vm_response = list_virtual_machines(
            self.apiclient,
            id=self.vm_2.id
        )
        self.assertNotEqual(
            len(vm_response),
            0,
            "Check List Template response"
        )
        for vm in vm_response:
            self.debug("VM ID: %s and state: %s" % (vm.id, vm.state))
            self.assertEqual(
                vm.state,
                'Running',
                "Check State of Virtual machine"
            )
        return
class TestUserDetails(cloudstackTestCase):
    """Tests for the updateUser API against a plain user account, a
    ROOT-admin account and a domain-admin account.

    The three tests differed only in how the account is created; the
    previously triplicated update-and-verify flow now lives in the
    private helper :meth:`_update_and_verify_user`.
    """

    @classmethod
    def setUpClass(cls):
        cls.api_client = super(
            TestUserDetails,
            cls
        ).getClsTestClient().getApiClient()
        cls.services = Services().services
        # Get Zone, Domain etc
        cls.domain = get_domain(cls.api_client, cls.services)
        cls.zone = get_zone(cls.api_client, cls.services)
        cls.services['mode'] = cls.zone.networktype
        cls._cleanup = []
        return

    @classmethod
    def tearDownClass(cls):
        try:
            # Cleanup resources used
            cleanup_resources(cls.api_client, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    def setUp(self):
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.cleanup = []
        return

    def tearDown(self):
        try:
            # Clean up, terminate the created network offerings
            cleanup_resources(self.apiclient, self.cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    def _update_and_verify_user(self):
        """Shared verification flow for all three tests.

        Fetches the first user of ``self.account``, updates its first
        and last name through the updateUser API, re-lists the user and
        asserts both names were changed.
        """
        # Fetching the user details of account
        self.debug(
            "Fetching user details for account: %s" %
            self.account.name)
        users = User.list(
            self.apiclient,
            account=self.account.name,
            domainid=self.account.domainid
        )
        self.assertEqual(
            isinstance(users, list),
            True,
            "List users should return a valid list for account"
        )
        user_1 = users[0]
        self.debug("Updating the details of user: %s" % user_1.name)
        # Random values guarantee the update is observable
        firstname = random_gen()
        lastname = random_gen()
        self.debug("New firstname: %s, lastname: %s" % (firstname, lastname))
        User.update(
            self.apiclient,
            user_1.id,
            firstname=firstname,
            lastname=lastname
        )
        # Fetching the user details of account
        self.debug(
            "Fetching user details for user: %s" % user_1.name)
        users = User.list(
            self.apiclient,
            id=user_1.id,
            listall=True
        )
        self.assertEqual(
            isinstance(users, list),
            True,
            "List users should return a valid list for account"
        )
        user_1 = users[0]
        self.assertEqual(
            user_1.firstname,
            firstname,
            "User's first name should be updated with new one"
        )
        self.assertEqual(
            user_1.lastname,
            lastname,
            "User's last name should be updated with new one"
        )
        return

    @attr(tags=[
        "role",
        "accounts",
        "simulator",
        "advanced",
        "advancedns",
        "basic",
        "eip",
        "sg"
    ])
    def test_updateUserDetails(self):
        """Test user update API
        """
        # Steps for test scenario
        # 1. create a user account
        # 2. update the user details (firstname, lastname, user) with
        #    updateUser API
        # 3. listUsers in the account
        # 4. delete the account
        # Validate the following
        # 1. listAccounts should show account created successfully
        # 2. updateUser API should return valid response
        # 3. user should be updated with new details
        self.debug("Creating an user account..")
        self.account = Account.create(
            self.apiclient,
            self.services["account"],
            domainid=self.domain.id
        )
        self.cleanup.append(self.account)
        self._update_and_verify_user()
        return

    @attr(tags=[
        "role",
        "accounts",
        "simulator",
        "advanced",
        "advancedns",
        "basic",
        "eip",
        "sg"
    ])
    def test_updateAdminDetails(self):
        """Test update admin details
        """
        # Steps for test scenario
        # 1. create a admin account
        # 2. update the user details (firstname, lastname, user) with
        #    updateUser API
        # 3. listUsers in the account
        # 4. delete the account
        # Validate the following
        # 1. listAccounts should show account created successfully
        # 2. updateUser API should return valid response
        # 3. user should be updated with new details
        self.debug("Creating a ROOT admin account")
        self.account = Account.create(
            self.apiclient,
            self.services["account"],
            admin=True,
        )
        self.cleanup.append(self.account)
        self._update_and_verify_user()
        return

    @attr(tags=[
        "role",
        "accounts",
        "simulator",
        "advanced",
        "advancedns",
        "basic",
        "eip",
        "sg"
    ])
    def test_updateDomainAdminDetails(self):
        """Test update domain admin details
        """
        # Steps for test scenario
        # 1. create a domain admin account
        # 2. update the user details (firstname, lastname, user) with
        #    updateUser API
        # 3. listUsers in the account
        # 4. delete the account
        # Validate the following
        # 1. listAccounts should show account created successfully
        # 2. updateUser API should return valid response
        # 3. user should be updated with new details
        self.debug("Creating a domain admin account")
        self.account = Account.create(
            self.apiclient,
            self.services["account"],
            admin=True,
            domainid=self.domain.id
        )
        self.cleanup.append(self.account)
        self._update_and_verify_user()
        return
class TestUserLogin(cloudstackTestCase):
    """Tests for the login API: session key presence and domain login."""

    @classmethod
    def setUpClass(cls):
        cls.api_client = super(
            TestUserLogin,
            cls
        ).getClsTestClient().getApiClient()
        cls.services = Services().services
        # Get Zone, Domain etc
        cls.domain = get_domain(cls.api_client, cls.services)
        cls.zone = get_zone(cls.api_client, cls.services)
        cls.services['mode'] = cls.zone.networktype
        cls._cleanup = []
        return

    @classmethod
    def tearDownClass(cls):
        try:
            # Cleanup resources used
            cleanup_resources(cls.api_client, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    def setUp(self):
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.cleanup = []
        return

    def tearDown(self):
        try:
            # Clean up, terminate the created network offerings
            cleanup_resources(self.apiclient, self.cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    @attr(tags=["login", "accounts", "simulator", "advanced",
                "advancedns", "basic", "eip", "sg"])
    def test_LoginApiUuidResponse(self):
        """Test if Login API does not return UUID's
        """
        # Steps for test scenario
        # 1. create a user account
        # 2. login to the user account with given credentials (loginCmd)
        # 3. delete the user account
        # Validate the following
        # 1. listAccounts should return account created
        # 2. loginResponse should have UUID only is response. Assert by
        #    checking database id is not same as response id
        #    Login also succeeds with non NULL sessionId in response
        self.debug("Creating an user account..")
        self.account = Account.create(
            self.apiclient,
            self.services["account"],
            domainid=self.domain.id
        )
        self.cleanup.append(self.account)

        self.debug("Logging into the cloudstack with login API")
        response = User.login(
            self.apiclient,
            username=self.account.name,
            password=self.services["account"]["password"]
        )
        # BUG FIX: the original used assertEqual(response, None, ...),
        # which contradicts both its own message and the dereference of
        # response.sessionkey just below; assertNotEqual matches intent.
        self.assertNotEqual(response, None, "Login response should not be none")
        self.debug("Login API response: %s" % response)

        self.assertNotEqual(
            response.sessionkey,
            None,
            "Login to the CloudStack should be successful" +
            "response shall have non Null key"
        )
        return

    @attr(tags=["login", "accounts", "simulator", "advanced",
                "advancedns", "basic", "eip", "sg"])
    def test_LoginApiDomain(self):
        """Test login API with domain
        """
        # Steps for test scenario
        # 1. create a domain
        # 2. create user in the domain
        # 3. login to the user account above using UUID domain/user
        # 4. delete the user account
        # Validate the following
        # 1. listDomains returns created domain
        # 2. listAccounts returns created user
        # 3. loginResponse should have UUID only in responses
        #    Login also succeeds with non NULL sessionId in response
        self.debug("Creating a domain for login with API domain test")
        domain = Domain.create(
            self.apiclient,
            self.services["domain"],
            parentdomainid=self.domain.id
        )
        self.debug("Domain: %s is created successfully." % domain.name)
        self.debug(
            "Checking if the created domain is listed in list domains API")
        domains = Domain.list(self.apiclient, id=domain.id, listall=True)
        self.assertEqual(
            isinstance(domains, list),
            True,
            "List domains shall return a valid response"
        )
        self.debug("Creating an user account in domain: %s" % domain.name)
        self.account = Account.create(
            self.apiclient,
            self.services["account"],
            domainid=domain.id
        )
        self.cleanup.append(self.account)

        accounts = Account.list(
            self.apiclient,
            name=self.account.name,
            domainid=self.account.domainid,
            listall=True
        )
        self.assertEqual(
            isinstance(accounts, list),
            True,
            "List accounts should return a valid response"
        )
        self.debug("Logging into the cloudstack with login API")
        response = User.login(
            self.apiclient,
            username=self.account.name,
            password=self.services["account"]["password"],
            domainid=domain.id)
        self.debug("Login API response: %s" % response)
        self.assertNotEqual(
            response.sessionkey,
            None,
            "Login to the CloudStack should be successful" +
            "response shall have non Null key"
        )
        return
class TestDomainForceRemove(cloudstackTestCase):
@classmethod
def setUpClass(cls):
cls.api_client = super(
TestDomainForceRemove,
cls
).getClsTestClient().getApiClient()
cls.services = Services().services
# Setup working Environment- Create domain, zone, pod cluster etc.
cls.domain = get_domain(
cls.api_client,
cls.services
)
cls.zone = get_zone(
cls.api_client,
cls.services,
)
cls.services['mode'] = cls.zone.networktype
cls.template = get_template(
cls.api_client,
cls.zone.id,
cls.services["ostype"]
)
cls.services["virtual_machine"]["zoneid"] = cls.zone.id
cls._cleanup = []
return
@classmethod
def tearDownClass(cls):
try:
#Clean up, terminate the created resources
cleanup_resources(cls.api_client, cls._cleanup)
except Exception as e:
raise Exception("Warning: Exception during cleanup : %s" % e)
return
def setUp(self):
self.apiclient = self.testClient.getApiClient()
self.dbclient = self.testClient.getDbConnection()
self.cleanup = []
return
def tearDown(self):
try:
#Clean up, terminate the created resources
cleanup_resources(self.apiclient, self.cleanup)
except Exception as e:
raise Exception("Warning: Exception during cleanup : %s" % e)
return
@attr(tags=["domains", "advanced", "advancedns", "simulator"])
def test_forceDeleteDomain(self):
""" Test delete domain with force option"""
# Steps for validations
# 1. create a domain DOM
# 2. create 2 users under this domain
# 3. deploy 1 VM into each of these user accounts
# 4. create PF / FW rules for port 22 on these VMs for their
# respective accounts
# 5. delete the domain with force=true option
# Validate the following
# 1. listDomains should list the created domain
# 2. listAccounts should list the created accounts
# 3. listvirtualmachines should show the Running VMs
# 4. PF and FW rules should be shown in listFirewallRules
# 5. domain should delete successfully and above three list calls
# should show all the resources now deleted. listRouters should
# not return any routers in the deleted accounts/domains
self.debug("Creating a domain for login with API domain test")
domain = Domain.create(
self.apiclient,
self.services["domain"],
parentdomainid=self.domain.id
)
self.debug("Domain is created succesfully.")
self.debug(
"Checking if the created domain is listed in list domains API")
domains = Domain.list(self.apiclient, id=domain.id, listall=True)
self.assertEqual(
isinstance(domains, list),
True,
"List domains shall return a valid response"
)
self.debug("Creating 2 user accounts in domain: %s" % domain.name)
self.account_1 = Account.create(
self.apiclient,
self.services["account"],
domainid=domain.id
)
self.account_2 = Account.create(
self.apiclient,
self.services["account"],
domainid=domain.id
)
self.debug("Creating a tiny service offering for VM deployment")
self.service_offering = ServiceOffering.create(
self.apiclient,
self.services["service_offering"],
domainid=self.domain.id
)
self.debug("Deploying virtual machine in account 1: %s" %
self.account_1.name)
vm_1 = VirtualMachine.create(
self.apiclient,
self.services["virtual_machine"],
templateid=self.template.id,
accountid=self.account_1.name,
domainid=self.account_1.domainid,
serviceofferingid=self.service_offering.id
)
self.debug("Deploying virtual machine in account 2: %s" %
self.account_2.name)
vm_2 = VirtualMachine.create(
self.apiclient,
self.services["virtual_machine"],
templateid=self.template.id,
accountid=self.account_2.name,
domainid=self.account_2.domainid,
serviceofferingid=self.service_offering.id
)
networks = Network.list(
self.apiclient,
account=self.account_1.name,
domainid=self.account_1.domainid,
listall=True
)
self.assertEqual(
isinstance(networks, list),
True,
"List networks should return a valid response"
)
network_1 = networks[0]
self.debug("Default network in account 1: %s is %s" % (
self.account_1.name,
network_1.name))
src_nat_list = PublicIPAddress.list(
self.apiclient,
associatednetworkid=network_1.id,
account=self.account_1.name,
domainid=self.account_1.domainid,
listall=True,
issourcenat=True,
)
self.assertEqual(
isinstance(src_nat_list, list),
True,
"List Public IP should return a valid source NAT"
)
self.assertNotEqual(
len(src_nat_list),
0,
"Length of response from listPublicIp should not be 0"
)
src_nat = src_nat_list[0]
self.debug(
"Trying to create a port forwarding rule in source NAT: %s" %
src_nat.ipaddress)
#Create NAT rule
nat_rule = NATRule.create(
self.apiclient,
vm_1,
self.services["natrule"],
ipaddressid=src_nat.id
)
self.debug("Created PF rule on source NAT: %s" % src_nat.ipaddress)
nat_rules = NATRule.list(self.apiclient, id=nat_rule.id)
self.assertEqual(
isinstance(nat_rules, list),
True,
"List NAT should return a valid port forwarding rules"
)
self.assertNotEqual(
len(nat_rules),
0,
"Length of response from listLbRules should not be 0"
)
self.debug("Deleting domain with force option")
try:
domain.delete(self.apiclient, cleanup=True)
except Exception as e:
self.fail("Failed to delete domain: %s" % e)
self.debug("Waiting for account.cleanup.interval" +
" to cleanup any remaining resouces")
# Sleep 2*account.gc to ensure that all resources are deleted
wait_for_cleanup(self.apiclient, ["account.cleanup.interval"]*2)
self.debug("Checking if the resources in domain are deleted")
with self.assertRaises(cloudstackAPIException):
Account.list(
self.apiclient,
name=self.account_1.name,
domainid=self.account_1.domainid,
listall=True
)
return
@attr(tags=["domains", "advanced", "advancedns", "simulator"])
def test_DeleteDomain(self):
    """Test delete domain without the force option.

    Steps:
    1. Create a domain DOM under the root domain.
    2. Create 2 user accounts under this domain.
    3. Deploy 1 VM into each of these user accounts.
    4. Create a PF rule on the source NAT IP of account 1.
    5. Delete the domain with force=false.

    Validations:
    1. listDomains should list the created domain.
    2. Accounts, VMs and the PF rule should be created successfully.
    3. Domain deletion should fail because resources are still in use.
    """
    self.debug("Creating a domain for login with API domain test")
    domain = Domain.create(
        self.apiclient,
        self.services["domain"],
        parentdomainid=self.domain.id
    )
    self.debug("Domain: %s is created successfully." % domain.name)
    self.debug(
        "Checking if the created domain is listed in list domains API")
    domains = Domain.list(self.apiclient, id=domain.id, listall=True)
    self.assertEqual(
        isinstance(domains, list),
        True,
        "List domains shall return a valid response"
    )
    self.debug("Creating 2 user accounts in domain: %s" % domain.name)
    self.account_1 = Account.create(
        self.apiclient,
        self.services["account"],
        domainid=domain.id
    )
    # Register for tearDown cleanup: the domain is expected to survive
    # this test, so the accounts must be removed explicitly afterwards.
    self.cleanup.append(self.account_1)
    self.account_2 = Account.create(
        self.apiclient,
        self.services["account"],
        domainid=domain.id
    )
    self.cleanup.append(self.account_2)
    self.debug("Creating a tiny service offering for VM deployment")
    self.service_offering = ServiceOffering.create(
        self.apiclient,
        self.services["service_offering"],
        domainid=self.domain.id
    )
    self.cleanup.append(self.service_offering)
    self.debug("Deploying virtual machine in account 1: %s" %
               self.account_1.name)
    vm_1 = VirtualMachine.create(
        self.apiclient,
        self.services["virtual_machine"],
        templateid=self.template.id,
        accountid=self.account_1.name,
        domainid=self.account_1.domainid,
        serviceofferingid=self.service_offering.id
    )
    self.debug("Deploying virtual machine in account 2: %s" %
               self.account_2.name)
    # vm_2 exists only to make account 2 non-empty; it is not referenced
    # again in this test.
    vm_2 = VirtualMachine.create(
        self.apiclient,
        self.services["virtual_machine"],
        templateid=self.template.id,
        accountid=self.account_2.name,
        domainid=self.account_2.domainid,
        serviceofferingid=self.service_offering.id
    )
    networks = Network.list(
        self.apiclient,
        account=self.account_1.name,
        domainid=self.account_1.domainid,
        listall=True
    )
    self.assertEqual(
        isinstance(networks, list),
        True,
        "List networks should return a valid response"
    )
    network_1 = networks[0]
    self.debug("Default network in account 1: %s is %s" % (
        self.account_1.name,
        network_1.name))
    src_nat_list = PublicIPAddress.list(
        self.apiclient,
        associatednetworkid=network_1.id,
        account=self.account_1.name,
        domainid=self.account_1.domainid,
        listall=True,
        issourcenat=True,
    )
    self.assertEqual(
        isinstance(src_nat_list, list),
        True,
        "List Public IP should return a valid source NAT"
    )
    self.assertNotEqual(
        len(src_nat_list),
        0,
        "Length of response from listPublicIp should not be 0"
    )
    src_nat = src_nat_list[0]
    self.debug(
        "Trying to create a port forwarding rule in source NAT: %s" %
        src_nat.ipaddress)
    # Create NAT rule
    nat_rule = NATRule.create(
        self.apiclient,
        vm_1,
        self.services["natrule"],
        ipaddressid=src_nat.id
    )
    self.debug("Created PF rule on source NAT: %s" % src_nat.ipaddress)
    nat_rules = NATRule.list(self.apiclient, id=nat_rule.id)
    self.assertEqual(
        isinstance(nat_rules, list),
        True,
        "List NAT rules should return a valid list of PF rules"
    )
    # Fixed copy-paste in the failure message: this asserts on the
    # port-forwarding rule listing, not on listLbRules.
    self.assertNotEqual(
        len(nat_rules),
        0,
        "Length of response from listPortForwardingRules should not be 0"
    )
    self.debug("Deleting domain without force option")
    # Deleting a domain that still holds accounts/VMs/rules with
    # cleanup=False must be rejected by the API.
    with self.assertRaises(Exception):
        domain.delete(self.apiclient, cleanup=False)
    return
| 39.755821
| 115
| 0.445406
| 5,438
| 66,591
| 5.373299
| 0.076499
| 0.034702
| 0.023409
| 0.024127
| 0.820842
| 0.787953
| 0.776865
| 0.757084
| 0.746612
| 0.714921
| 0
| 0.009452
| 0.486823
| 66,591
| 1,674
| 116
| 39.77957
| 0.845609
| 0.112222
| 0
| 0.731839
| 0
| 0.000773
| 0.118924
| 0.000815
| 0
| 0
| 0
| 0
| 0.047141
| 1
| 0.03864
| false
| 0.003864
| 0.006182
| 0
| 0.090417
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1335240cee24e04e7556524f53b1b51cffb3db6e
| 6,126
|
py
|
Python
|
nova/tests/unit/scheduler/filters/test_io_ops_filters.py
|
bopopescu/nova-token
|
ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2
|
[
"Apache-2.0"
] | null | null | null |
nova/tests/unit/scheduler/filters/test_io_ops_filters.py
|
bopopescu/nova-token
|
ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2
|
[
"Apache-2.0"
] | null | null | null |
nova/tests/unit/scheduler/filters/test_io_ops_filters.py
|
bopopescu/nova-token
|
ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2
|
[
"Apache-2.0"
] | 2
|
2017-07-20T17:31:34.000Z
|
2020-07-24T02:42:19.000Z
|
begin_unit
comment|'# Licensed under the Apache License, Version 2.0 (the "License"); you may'
nl|'\n'
comment|'# not use this file except in compliance with the License. You may obtain'
nl|'\n'
comment|'# a copy of the License at'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# http://www.apache.org/licenses/LICENSE-2.0'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# Unless required by applicable law or agreed to in writing, software'
nl|'\n'
comment|'# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT'
nl|'\n'
comment|'# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the'
nl|'\n'
comment|'# License for the specific language governing permissions and limitations'
nl|'\n'
comment|'# under the License.'
nl|'\n'
nl|'\n'
nl|'\n'
name|'import'
name|'mock'
newline|'\n'
nl|'\n'
name|'from'
name|'nova'
name|'import'
name|'objects'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'scheduler'
op|'.'
name|'filters'
name|'import'
name|'io_ops_filter'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'test'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'tests'
op|'.'
name|'unit'
op|'.'
name|'scheduler'
name|'import'
name|'fakes'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|TestNumInstancesFilter
name|'class'
name|'TestNumInstancesFilter'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
nl|'\n'
DECL|member|test_filter_num_iops_passes
indent|' '
name|'def'
name|'test_filter_num_iops_passes'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'flags'
op|'('
name|'max_io_ops_per_host'
op|'='
number|'8'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'filt_cls'
op|'='
name|'io_ops_filter'
op|'.'
name|'IoOpsFilter'
op|'('
op|')'
newline|'\n'
name|'host'
op|'='
name|'fakes'
op|'.'
name|'FakeHostState'
op|'('
string|"'host1'"
op|','
string|"'node1'"
op|','
nl|'\n'
op|'{'
string|"'num_io_ops'"
op|':'
number|'7'
op|'}'
op|')'
newline|'\n'
name|'spec_obj'
op|'='
name|'objects'
op|'.'
name|'RequestSpec'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'self'
op|'.'
name|'filt_cls'
op|'.'
name|'host_passes'
op|'('
name|'host'
op|','
name|'spec_obj'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_filter_num_iops_fails
dedent|''
name|'def'
name|'test_filter_num_iops_fails'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'flags'
op|'('
name|'max_io_ops_per_host'
op|'='
number|'8'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'filt_cls'
op|'='
name|'io_ops_filter'
op|'.'
name|'IoOpsFilter'
op|'('
op|')'
newline|'\n'
name|'host'
op|'='
name|'fakes'
op|'.'
name|'FakeHostState'
op|'('
string|"'host1'"
op|','
string|"'node1'"
op|','
nl|'\n'
op|'{'
string|"'num_io_ops'"
op|':'
number|'8'
op|'}'
op|')'
newline|'\n'
name|'spec_obj'
op|'='
name|'objects'
op|'.'
name|'RequestSpec'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'self'
op|'.'
name|'filt_cls'
op|'.'
name|'host_passes'
op|'('
name|'host'
op|','
name|'spec_obj'
op|')'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.scheduler.filters.utils.aggregate_values_from_key'"
op|')'
newline|'\n'
DECL|member|test_aggregate_filter_num_iops_value
name|'def'
name|'test_aggregate_filter_num_iops_value'
op|'('
name|'self'
op|','
name|'agg_mock'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'flags'
op|'('
name|'max_io_ops_per_host'
op|'='
number|'7'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'filt_cls'
op|'='
name|'io_ops_filter'
op|'.'
name|'AggregateIoOpsFilter'
op|'('
op|')'
newline|'\n'
name|'host'
op|'='
name|'fakes'
op|'.'
name|'FakeHostState'
op|'('
string|"'host1'"
op|','
string|"'node1'"
op|','
nl|'\n'
op|'{'
string|"'num_io_ops'"
op|':'
number|'7'
op|'}'
op|')'
newline|'\n'
name|'spec_obj'
op|'='
name|'objects'
op|'.'
name|'RequestSpec'
op|'('
name|'context'
op|'='
name|'mock'
op|'.'
name|'sentinel'
op|'.'
name|'ctx'
op|')'
newline|'\n'
name|'agg_mock'
op|'.'
name|'return_value'
op|'='
name|'set'
op|'('
op|'['
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'self'
op|'.'
name|'filt_cls'
op|'.'
name|'host_passes'
op|'('
name|'host'
op|','
name|'spec_obj'
op|')'
op|')'
newline|'\n'
name|'agg_mock'
op|'.'
name|'assert_called_once_with'
op|'('
name|'host'
op|','
string|"'max_io_ops_per_host'"
op|')'
newline|'\n'
name|'agg_mock'
op|'.'
name|'return_value'
op|'='
name|'set'
op|'('
op|'['
string|"'8'"
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'self'
op|'.'
name|'filt_cls'
op|'.'
name|'host_passes'
op|'('
name|'host'
op|','
name|'spec_obj'
op|')'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.scheduler.filters.utils.aggregate_values_from_key'"
op|')'
newline|'\n'
DECL|member|test_aggregate_filter_num_iops_value_error
name|'def'
name|'test_aggregate_filter_num_iops_value_error'
op|'('
name|'self'
op|','
name|'agg_mock'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'flags'
op|'('
name|'max_io_ops_per_host'
op|'='
number|'8'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'filt_cls'
op|'='
name|'io_ops_filter'
op|'.'
name|'AggregateIoOpsFilter'
op|'('
op|')'
newline|'\n'
name|'host'
op|'='
name|'fakes'
op|'.'
name|'FakeHostState'
op|'('
string|"'host1'"
op|','
string|"'node1'"
op|','
nl|'\n'
op|'{'
string|"'num_io_ops'"
op|':'
number|'7'
op|'}'
op|')'
newline|'\n'
name|'agg_mock'
op|'.'
name|'return_value'
op|'='
name|'set'
op|'('
op|'['
string|"'XXX'"
op|']'
op|')'
newline|'\n'
name|'spec_obj'
op|'='
name|'objects'
op|'.'
name|'RequestSpec'
op|'('
name|'context'
op|'='
name|'mock'
op|'.'
name|'sentinel'
op|'.'
name|'ctx'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'self'
op|'.'
name|'filt_cls'
op|'.'
name|'host_passes'
op|'('
name|'host'
op|','
name|'spec_obj'
op|')'
op|')'
newline|'\n'
name|'agg_mock'
op|'.'
name|'assert_called_once_with'
op|'('
name|'host'
op|','
string|"'max_io_ops_per_host'"
op|')'
newline|'\n'
dedent|''
dedent|''
endmarker|''
end_unit
| 13.202586
| 88
| 0.62145
| 937
| 6,126
| 3.927428
| 0.140875
| 0.164674
| 0.089674
| 0.075
| 0.789674
| 0.779891
| 0.766576
| 0.748641
| 0.715489
| 0.692663
| 0
| 0.003806
| 0.099412
| 6,126
| 463
| 89
| 13.231102
| 0.663223
| 0
| 0
| 0.937365
| 0
| 0
| 0.399445
| 0.057623
| 0
| 0
| 0
| 0
| 0.015119
| 0
| null | null | 0.015119
| 0.010799
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
137fd3fb880be9a30fafd38ccf31e9d770579e23
| 7,075
|
py
|
Python
|
authors/apps/articles/tests/api/test_comments.py
|
C3real-kill3r/binary-jungle-backend
|
5333138fbce901e75accf5487b10990979afa571
|
[
"MIT"
] | null | null | null |
authors/apps/articles/tests/api/test_comments.py
|
C3real-kill3r/binary-jungle-backend
|
5333138fbce901e75accf5487b10990979afa571
|
[
"MIT"
] | 8
|
2020-02-12T03:04:07.000Z
|
2022-03-12T00:07:31.000Z
|
authors/apps/articles/tests/api/test_comments.py
|
C3real-kill3r/binary-jungle-backend
|
5333138fbce901e75accf5487b10990979afa571
|
[
"MIT"
] | null | null | null |
from rest_framework import status
from rest_framework.reverse import reverse
from authors.apps.articles.tests.api.test_articles import BaseArticlesTestCase
import json
class TestArticleComment(BaseArticlesTestCase):
    """Tests for creating, reading, threading, updating and deleting
    comments on articles."""

    def setUp(self):
        super().setUp()
        self.register()
        # Request payloads reused across the individual tests.
        self.comment = {"comment": {"body": "comment on this "}, "mentions": ['tester001']}
        self.thread = {"comment": {"body": "comment on this thread "}}
        self.user = {
            "user": {
                "username": "tester001",
                "email": "test@example.com",
                "password": "@AFFsecret123"
            }
        }

    def _comments_url(self, slug):
        """URL of the comment collection for the article *slug*."""
        return reverse("articles:comments", kwargs={'slug': slug})

    def _single_comment_url(self, slug, pk):
        """URL of a single comment *pk* on the article *slug*."""
        return reverse("articles:a-comment", kwargs={
            'slug': slug,
            'pk': pk
        })

    def _post_comment(self, slug):
        """POST the canned comment payload to *slug* and return the response."""
        return self.client.post(
            self._comments_url(slug),
            data=self.comment,
            format="json")

    def test_create_comment(self):
        """Authenticated user can add comment"""
        self.register_and_login(self.user)
        slug = self.create_article()['slug']
        response = self._post_comment(slug)
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)

    def test_get_comment_related_article(self):
        """Test get comment related to article"""
        self.register_and_login(self.user)
        slug = self.create_article()['slug']
        payload = self.comment
        # First fetch against the real article, then against an arbitrary
        # slug; only the latter response is asserted on.
        self.client.get(self._comments_url(slug), data=payload, format="json")
        response = self.client.get(
            self._comments_url('1j23kj2'), data=payload, format="json")
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_comment_unavailable_article(self):
        """Test commenting on non-existing-comment"""
        self.register_and_login(self.user)
        slug = None
        response = self._post_comment(slug)
        self.client.get(self._comments_url(slug))
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_thread_comment_related_article(self):
        """Test thread comment related to article"""
        self.register_and_login(self.user)
        slug = self.create_article()["slug"]
        created = self._post_comment(slug)
        pk = json.loads(created.content)['data']['comment']['id']
        # Reply to the comment we just created.
        response = self.client.post(
            self._single_comment_url(slug, pk),
            data=self.thread,
            format="json")
        # Read back the thread, once with a valid pk and once with a
        # bogus pk; neither response is asserted on.
        self.client.get(self._single_comment_url(slug, pk), format="json")
        self.client.get(self._single_comment_url(slug, 123), format="json")
        self.client.get(reverse('articles:comment-users', kwargs={
            'slug': slug,
        }))
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)

    def test_thread_comment_unavailable_article(self):
        """Test thread-commenting on non-existing-comment"""
        self.register_and_login(self.user)
        slug = None
        created = self._post_comment(slug)
        pk = json.loads(created.content)
        response = self.client.post(
            self._single_comment_url(slug, pk),
            data=self.thread,
            format="json")
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_invalid_articleslug_comment(self):
        """Test incorrect slug in commenting"""
        self.register_and_login(self.user)
        slug = "fake-slug"
        created = self._post_comment(slug)
        pk = json.loads(created.content)
        response = self.client.post(
            self._single_comment_url(slug, pk),
            data=self.thread,
            format="json")
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_update_comment(self):
        """Test update comment related to article"""
        self.register_and_login(self.user)
        slug = self.create_article()["slug"]
        created = self._post_comment(slug)
        pk = json.loads(created.content)["data"]["comment"]["id"]
        # Attempt updates with a bad slug and a bad pk first (not
        # asserted on), then a valid update whose status is checked.
        self.client.put(
            self._single_comment_url('jsdklfjlakdf', pk),
            data=self.thread,
            format="json")
        self.client.put(
            self._single_comment_url(slug, 123),
            data=self.thread,
            format="json")
        response = self.client.put(
            self._single_comment_url(slug, pk),
            data=self.thread,
            format="json")
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)

    def test_delete_comment(self):
        """Test delete comment related to article"""
        self.register_and_login(self.user)
        slug = self.create_article()["slug"]
        created = self._post_comment(slug)
        pk = json.loads(created.content)['data']['comment']['id']
        # Attempt deletes with a bad slug and a bad pk first (not
        # asserted on), then a valid delete whose status is checked.
        self.client.delete(
            self._single_comment_url('jadjfadlf', pk),
            data=self.thread,
            format="json")
        self.client.delete(
            self._single_comment_url(slug, 1223),
            data=self.thread,
            format="json")
        response = self.client.delete(
            self._single_comment_url(slug, pk),
            data=self.thread,
            format="json")
        self.assertEqual(response.status_code, status.HTTP_200_OK)
| 35.375
| 91
| 0.542756
| 706
| 7,075
| 5.332861
| 0.124646
| 0.058433
| 0.070651
| 0.067198
| 0.838247
| 0.792563
| 0.792563
| 0.773971
| 0.755113
| 0.694555
| 0
| 0.009864
| 0.326502
| 7,075
| 199
| 92
| 35.552764
| 0.780273
| 0.048763
| 0
| 0.79661
| 0
| 0
| 0.122232
| 0.003291
| 0
| 0
| 0
| 0
| 0.045198
| 1
| 0.050847
| false
| 0.00565
| 0.022599
| 0
| 0.079096
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1397af718e3e329ed4686b2d81d3af32c9402f0d
| 19,104
|
py
|
Python
|
official/vision/beta/projects/volumetric_models/modeling/nn_blocks_3d.py
|
juinquok/models
|
c0e374b87e829cde45553ab27346e00c5970ceb9
|
[
"Apache-2.0"
] | 3
|
2022-03-05T10:46:52.000Z
|
2022-03-22T06:00:05.000Z
|
official/vision/beta/projects/volumetric_models/modeling/nn_blocks_3d.py
|
juinquok/models
|
c0e374b87e829cde45553ab27346e00c5970ceb9
|
[
"Apache-2.0"
] | 4
|
2021-07-17T23:59:03.000Z
|
2021-07-21T10:18:14.000Z
|
official/vision/beta/projects/volumetric_models/modeling/nn_blocks_3d.py
|
juinquok/models
|
c0e374b87e829cde45553ab27346e00c5970ceb9
|
[
"Apache-2.0"
] | 2
|
2021-08-17T22:07:17.000Z
|
2021-12-25T12:25:47.000Z
|
# Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains common building blocks for neural networks."""
from typing import Sequence, Union
# Import libraries
import tensorflow as tf
from official.modeling import tf_utils
from official.vision.beta.modeling.layers import nn_layers
@tf.keras.utils.register_keras_serializable(package='Vision')
class BasicBlock3DVolume(tf.keras.layers.Layer):
  """A basic 3d convolution block.

  Applies one or more Conv3D layers, each optionally followed by batch
  normalization, and always followed by the configured activation.
  """

  def __init__(self,
               filters: Union[int, Sequence[int]],
               strides: Union[int, Sequence[int]],
               kernel_size: Union[int, Sequence[int]],
               kernel_initializer: str = 'VarianceScaling',
               kernel_regularizer: tf.keras.regularizers.Regularizer = None,
               bias_regularizer: tf.keras.regularizers.Regularizer = None,
               activation: str = 'relu',
               use_sync_bn: bool = False,
               norm_momentum: float = 0.99,
               norm_epsilon: float = 0.001,
               use_batch_normalization: bool = False,
               **kwargs):
    """Creates a basic 3d convolution block applying one or more convolutions.

    Args:
      filters: A list of `int` numbers or an `int` number of filters. Given an
        `int` input, a single convolution is applied; otherwise a series of
        convolutions are applied.
      strides: An integer or tuple/list of 3 integers, specifying the strides of
        the convolution along each spatial dimension. Can be a single integer to
        specify the same value for all spatial dimensions.
      kernel_size: An integer or tuple/list of 3 integers, specifying the depth,
        height and width of the 3D convolution window. Can be a single integer
        to specify the same value for all spatial dimensions.
      kernel_initializer: kernel_initializer for convolutional layers.
      kernel_regularizer: tf.keras.regularizers.Regularizer object for Conv2D.
        Default to None.
      bias_regularizer: tf.keras.regularizers.Regularizer object for Conv2d.
        Default to None.
      activation: `str` name of the activation function.
      use_sync_bn: if True, use synchronized batch normalization.
      norm_momentum: `float` normalization momentum for the moving average.
      norm_epsilon: `float` small float added to variance to avoid dividing by
        zero.
      use_batch_normalization: Whether to use batch normalization or not.
      **kwargs: keyword arguments to be passed.
    """
    super().__init__(**kwargs)
    # Normalize a scalar filter count to a one-element list so that build()
    # can always iterate over self._filters.
    if isinstance(filters, int):
      self._filters = [filters]
    else:
      self._filters = filters
    self._strides = strides
    self._kernel_size = kernel_size
    self._kernel_initializer = kernel_initializer
    self._kernel_regularizer = kernel_regularizer
    self._bias_regularizer = bias_regularizer
    self._activation = activation
    self._use_sync_bn = use_sync_bn
    self._norm_momentum = norm_momentum
    self._norm_epsilon = norm_epsilon
    self._use_batch_normalization = use_batch_normalization
    # Select the normalization layer class; instances are created in build().
    if use_sync_bn:
      self._norm = tf.keras.layers.experimental.SyncBatchNormalization
    else:
      self._norm = tf.keras.layers.BatchNormalization
    # Channel axis depends on the configured image data format.
    if tf.keras.backend.image_data_format() == 'channels_last':
      self._bn_axis = -1
    else:
      self._bn_axis = 1
    self._activation_fn = tf_utils.get_activation(activation)

  def build(self, input_shape: tf.TensorShape):
    """Builds the basic 3d convolution block."""
    self._convs = []
    self._norms = []
    # One (conv, norm) pair per entry in self._filters; all pairs share the
    # same kernel size and strides.
    # NOTE(review): the stored kernel_initializer / kernel_regularizer /
    # bias_regularizer are not forwarded to these Conv3D layers — confirm
    # whether that is intentional.
    for filters in self._filters:
      self._convs.append(
          tf.keras.layers.Conv3D(
              filters=filters,
              kernel_size=self._kernel_size,
              strides=self._strides,
              padding='same',
              data_format=tf.keras.backend.image_data_format(),
              activation=None))
      self._norms.append(
          self._norm(
              axis=self._bn_axis,
              momentum=self._norm_momentum,
              epsilon=self._norm_epsilon))
    super(BasicBlock3DVolume, self).build(input_shape)

  def get_config(self):
    """Returns the config of the basic 3d convolution block."""
    # Keys must mirror the __init__ keyword arguments so the layer can be
    # reconstructed by Keras deserialization.
    config = {
        'filters': self._filters,
        'strides': self._strides,
        'kernel_size': self._kernel_size,
        'kernel_initializer': self._kernel_initializer,
        'kernel_regularizer': self._kernel_regularizer,
        'bias_regularizer': self._bias_regularizer,
        'activation': self._activation,
        'use_sync_bn': self._use_sync_bn,
        'norm_momentum': self._norm_momentum,
        'norm_epsilon': self._norm_epsilon,
        'use_batch_normalization': self._use_batch_normalization
    }
    base_config = super(BasicBlock3DVolume, self).get_config()
    return dict(list(base_config.items()) + list(config.items()))

  def call(self, inputs: tf.Tensor, training: bool = None) -> tf.Tensor:
    """Runs forward pass on the input tensor."""
    x = inputs
    # conv -> (optional) batch norm -> activation, repeated per filter entry.
    for conv, norm in zip(self._convs, self._norms):
      x = conv(x)
      if self._use_batch_normalization:
        x = norm(x)
      x = self._activation_fn(x)
    return x
@tf.keras.utils.register_keras_serializable(package='Vision')
class ResidualBlock3DVolume(tf.keras.layers.Layer):
  """A residual 3d block.

  Two 3x3x3 convolutions with batch normalization plus an identity (or
  projection) shortcut, with optional Squeeze-and-Excitation and
  stochastic depth.
  """

  def __init__(self,
               filters,
               strides,
               use_projection=False,
               se_ratio=None,
               stochastic_depth_drop_rate=None,
               kernel_initializer='VarianceScaling',
               kernel_regularizer=None,
               bias_regularizer=None,
               activation='relu',
               use_sync_bn=False,
               norm_momentum=0.99,
               norm_epsilon=0.001,
               **kwargs):
    """A residual 3d block with BN after convolutions.

    Args:
      filters: `int` number of filters used by both convolutions in this
        block (and by the projection shortcut, if any).
      strides: `int` block stride. If greater than 1, this block will ultimately
        downsample the input.
      use_projection: `bool` for whether this block should use a projection
        shortcut (versus the default identity shortcut). This is usually `True`
        for the first block of a block group, which may change the number of
        filters and the resolution.
      se_ratio: `float` or None. Ratio of the Squeeze-and-Excitation layer.
      stochastic_depth_drop_rate: `float` or None. if not None, drop rate for
        the stochastic depth layer.
      kernel_initializer: kernel_initializer for convolutional layers.
      kernel_regularizer: tf.keras.regularizers.Regularizer object for Conv2D.
        Default to None.
      bias_regularizer: tf.keras.regularizers.Regularizer object for Conv2d.
        Default to None.
      activation: `str` name of the activation function.
      use_sync_bn: if True, use synchronized batch normalization.
      norm_momentum: `float` normalization momentum for the moving average.
      norm_epsilon: `float` small float added to variance to avoid dividing by
        zero.
      **kwargs: keyword arguments to be passed.
    """
    super().__init__(**kwargs)
    self._filters = filters
    self._strides = strides
    self._use_projection = use_projection
    self._se_ratio = se_ratio
    self._use_sync_bn = use_sync_bn
    self._activation = activation
    self._stochastic_depth_drop_rate = stochastic_depth_drop_rate
    self._kernel_initializer = kernel_initializer
    self._norm_momentum = norm_momentum
    self._norm_epsilon = norm_epsilon
    self._kernel_regularizer = kernel_regularizer
    self._bias_regularizer = bias_regularizer
    # Select the normalization layer class; instances are created in build().
    if use_sync_bn:
      self._norm = tf.keras.layers.experimental.SyncBatchNormalization
    else:
      self._norm = tf.keras.layers.BatchNormalization
    # Channel axis depends on the configured image data format.
    if tf.keras.backend.image_data_format() == 'channels_last':
      self._bn_axis = -1
    else:
      self._bn_axis = 1
    self._activation_fn = tf_utils.get_activation(activation)

  def build(self, input_shape):
    """Creates the sublayers of the residual block."""
    if self._use_projection:
      # 1x1x1 projection shortcut to match filters/stride of the main path.
      self._shortcut = tf.keras.layers.Conv3D(
          filters=self._filters,
          kernel_size=1,
          strides=self._strides,
          use_bias=False,
          kernel_initializer=self._kernel_initializer,
          kernel_regularizer=self._kernel_regularizer,
          bias_regularizer=self._bias_regularizer)
      self._norm0 = self._norm(
          axis=self._bn_axis,
          momentum=self._norm_momentum,
          epsilon=self._norm_epsilon)
    # First 3x3x3 convolution; carries the block's stride.
    self._conv1 = tf.keras.layers.Conv3D(
        filters=self._filters,
        kernel_size=3,
        strides=self._strides,
        padding='same',
        use_bias=False,
        kernel_initializer=self._kernel_initializer,
        kernel_regularizer=self._kernel_regularizer,
        bias_regularizer=self._bias_regularizer)
    self._norm1 = self._norm(
        axis=self._bn_axis,
        momentum=self._norm_momentum,
        epsilon=self._norm_epsilon)
    # Second 3x3x3 convolution; always stride 1.
    self._conv2 = tf.keras.layers.Conv3D(
        filters=self._filters,
        kernel_size=3,
        strides=1,
        padding='same',
        use_bias=False,
        kernel_initializer=self._kernel_initializer,
        kernel_regularizer=self._kernel_regularizer,
        bias_regularizer=self._bias_regularizer)
    self._norm2 = self._norm(
        axis=self._bn_axis,
        momentum=self._norm_momentum,
        epsilon=self._norm_epsilon)
    # Optional Squeeze-and-Excitation; only active for se_ratio in (0, 1].
    if self._se_ratio and self._se_ratio > 0 and self._se_ratio <= 1:
      self._squeeze_excitation = nn_layers.SqueezeExcitation(
          in_filters=self._filters,
          out_filters=self._filters,
          se_ratio=self._se_ratio,
          use_3d_input=True,
          kernel_initializer=self._kernel_initializer,
          kernel_regularizer=self._kernel_regularizer,
          bias_regularizer=self._bias_regularizer)
    else:
      self._squeeze_excitation = None
    # Optional stochastic depth on the residual branch.
    if self._stochastic_depth_drop_rate:
      self._stochastic_depth = nn_layers.StochasticDepth(
          self._stochastic_depth_drop_rate)
    else:
      self._stochastic_depth = None
    super(ResidualBlock3DVolume, self).build(input_shape)

  def get_config(self):
    """Returns the config of the residual 3d block."""
    # Keys must mirror the __init__ keyword arguments so the layer can be
    # reconstructed by Keras deserialization.
    config = {
        'filters': self._filters,
        'strides': self._strides,
        'use_projection': self._use_projection,
        'se_ratio': self._se_ratio,
        'stochastic_depth_drop_rate': self._stochastic_depth_drop_rate,
        'kernel_initializer': self._kernel_initializer,
        'kernel_regularizer': self._kernel_regularizer,
        'bias_regularizer': self._bias_regularizer,
        'activation': self._activation,
        'use_sync_bn': self._use_sync_bn,
        'norm_momentum': self._norm_momentum,
        'norm_epsilon': self._norm_epsilon
    }
    base_config = super(ResidualBlock3DVolume, self).get_config()
    return dict(list(base_config.items()) + list(config.items()))

  def call(self, inputs, training=None):
    """Runs forward pass: main path plus shortcut, then activation."""
    shortcut = inputs
    if self._use_projection:
      shortcut = self._shortcut(shortcut)
      shortcut = self._norm0(shortcut)
    # Main path: conv-bn-act, conv-bn.
    x = self._conv1(inputs)
    x = self._norm1(x)
    x = self._activation_fn(x)
    x = self._conv2(x)
    x = self._norm2(x)
    if self._squeeze_excitation:
      x = self._squeeze_excitation(x)
    if self._stochastic_depth:
      # Randomly drops the residual branch during training.
      x = self._stochastic_depth(x, training=training)
    return self._activation_fn(x + shortcut)
@tf.keras.utils.register_keras_serializable(package='Vision')
class BottleneckBlock3DVolume(tf.keras.layers.Layer):
  """A standard bottleneck block."""

  def __init__(self,
               filters,
               strides,
               dilation_rate=1,
               use_projection=False,
               se_ratio=None,
               stochastic_depth_drop_rate=None,
               kernel_initializer='VarianceScaling',
               kernel_regularizer=None,
               bias_regularizer=None,
               activation='relu',
               use_sync_bn=False,
               norm_momentum=0.99,
               norm_epsilon=0.001,
               **kwargs):
    """A standard bottleneck 3d block with BN after convolutions.

    Args:
      filters: `int` number of filters for the first two convolutions. Note that
        the third and final convolution will use 4 times as many filters.
      strides: `int` block stride. If greater than 1, this block will ultimately
        downsample the input.
      dilation_rate: `int` dilation_rate of convolutions. Default to 1.
      use_projection: `bool` for whether this block should use a projection
        shortcut (versus the default identity shortcut). This is usually `True`
        for the first block of a block group, which may change the number of
        filters and the resolution.
      se_ratio: `float` or None. Ratio of the Squeeze-and-Excitation layer.
      stochastic_depth_drop_rate: `float` or None. if not None, drop rate for
        the stochastic depth layer.
      kernel_initializer: kernel_initializer for convolutional layers.
      kernel_regularizer: tf.keras.regularizers.Regularizer object for Conv2D.
        Default to None.
      bias_regularizer: tf.keras.regularizers.Regularizer object for Conv2d.
        Default to None.
      activation: `str` name of the activation function.
      use_sync_bn: if True, use synchronized batch normalization.
      norm_momentum: `float` normalization momentum for the moving average.
      norm_epsilon: `float` small float added to variance to avoid dividing by
        zero.
      **kwargs: keyword arguments to be passed.
    """
    super().__init__(**kwargs)
    self._filters = filters
    self._strides = strides
    self._dilation_rate = dilation_rate
    self._use_projection = use_projection
    self._se_ratio = se_ratio
    self._use_sync_bn = use_sync_bn
    self._activation = activation
    self._stochastic_depth_drop_rate = stochastic_depth_drop_rate
    self._kernel_initializer = kernel_initializer
    self._norm_momentum = norm_momentum
    self._norm_epsilon = norm_epsilon
    self._kernel_regularizer = kernel_regularizer
    self._bias_regularizer = bias_regularizer
    # Pick the normalization class; instances are created lazily in build().
    if use_sync_bn:
      self._norm = tf.keras.layers.experimental.SyncBatchNormalization
    else:
      self._norm = tf.keras.layers.BatchNormalization
    # Channel axis depends on the backend's image data format.
    if tf.keras.backend.image_data_format() == 'channels_last':
      self._bn_axis = -1
    else:
      self._bn_axis = 1
    self._activation_fn = tf_utils.get_activation(activation)

  def build(self, input_shape):
    """Creates the sublayers: 1-3-1 conv stack, norms, optional SE/stochastic depth."""
    if self._use_projection:
      # 1x1x1 projection shortcut to match the 4x-expanded output filters
      # (and the downsampled resolution when strides > 1).
      self._shortcut = tf.keras.layers.Conv3D(
          filters=self._filters * 4,
          kernel_size=1,
          strides=self._strides,
          use_bias=False,
          kernel_initializer=self._kernel_initializer,
          kernel_regularizer=self._kernel_regularizer,
          bias_regularizer=self._bias_regularizer)
      self._norm0 = self._norm(
          axis=self._bn_axis,
          momentum=self._norm_momentum,
          epsilon=self._norm_epsilon)
    # 1x1x1 reduction conv.
    self._conv1 = tf.keras.layers.Conv3D(
        filters=self._filters,
        kernel_size=1,
        strides=1,
        use_bias=False,
        kernel_initializer=self._kernel_initializer,
        kernel_regularizer=self._kernel_regularizer,
        bias_regularizer=self._bias_regularizer)
    self._norm1 = self._norm(
        axis=self._bn_axis,
        momentum=self._norm_momentum,
        epsilon=self._norm_epsilon)
    # 3x3x3 spatial conv; carries the block's stride and dilation.
    self._conv2 = tf.keras.layers.Conv3D(
        filters=self._filters,
        kernel_size=3,
        strides=self._strides,
        dilation_rate=self._dilation_rate,
        padding='same',
        use_bias=False,
        kernel_initializer=self._kernel_initializer,
        kernel_regularizer=self._kernel_regularizer,
        bias_regularizer=self._bias_regularizer)
    self._norm2 = self._norm(
        axis=self._bn_axis,
        momentum=self._norm_momentum,
        epsilon=self._norm_epsilon)
    # 1x1x1 expansion conv back to 4x filters.
    self._conv3 = tf.keras.layers.Conv3D(
        filters=self._filters * 4,
        kernel_size=1,
        strides=1,
        use_bias=False,
        kernel_initializer=self._kernel_initializer,
        kernel_regularizer=self._kernel_regularizer,
        bias_regularizer=self._bias_regularizer)
    self._norm3 = self._norm(
        axis=self._bn_axis,
        momentum=self._norm_momentum,
        epsilon=self._norm_epsilon)
    # Squeeze-and-Excitation only for a valid ratio in (0, 1].
    if self._se_ratio and self._se_ratio > 0 and self._se_ratio <= 1:
      self._squeeze_excitation = nn_layers.SqueezeExcitation(
          in_filters=self._filters * 4,
          out_filters=self._filters * 4,
          se_ratio=self._se_ratio,
          use_3d_input=True,
          kernel_initializer=self._kernel_initializer,
          kernel_regularizer=self._kernel_regularizer,
          bias_regularizer=self._bias_regularizer)
    else:
      self._squeeze_excitation = None
    if self._stochastic_depth_drop_rate:
      self._stochastic_depth = nn_layers.StochasticDepth(
          self._stochastic_depth_drop_rate)
    else:
      self._stochastic_depth = None
    super(BottleneckBlock3DVolume, self).build(input_shape)

  def get_config(self):
    """Returns the config of the block for Keras serialization."""
    config = {
        'filters': self._filters,
        'strides': self._strides,
        'dilation_rate': self._dilation_rate,
        'use_projection': self._use_projection,
        'se_ratio': self._se_ratio,
        'stochastic_depth_drop_rate': self._stochastic_depth_drop_rate,
        'kernel_initializer': self._kernel_initializer,
        'kernel_regularizer': self._kernel_regularizer,
        'bias_regularizer': self._bias_regularizer,
        'activation': self._activation,
        'use_sync_bn': self._use_sync_bn,
        'norm_momentum': self._norm_momentum,
        'norm_epsilon': self._norm_epsilon
    }
    base_config = super(BottleneckBlock3DVolume, self).get_config()
    return dict(list(base_config.items()) + list(config.items()))

  def call(self, inputs, training=None):
    """Runs the bottleneck block forward pass.

    Args:
      inputs: input tensor.
      training: `bool` or None; forwarded to the stochastic-depth layer.

    Returns:
      The activated sum of the residual branch and the shortcut.
    """
    shortcut = inputs
    if self._use_projection:
      shortcut = self._shortcut(shortcut)
      shortcut = self._norm0(shortcut)
    x = self._conv1(inputs)
    x = self._norm1(x)
    x = self._activation_fn(x)
    x = self._conv2(x)
    x = self._norm2(x)
    x = self._activation_fn(x)
    x = self._conv3(x)
    x = self._norm3(x)
    if self._squeeze_excitation:
      x = self._squeeze_excitation(x)
    if self._stochastic_depth:
      x = self._stochastic_depth(x, training=training)
    return self._activation_fn(x + shortcut)
| 37.606299
| 80
| 0.679596
| 2,300
| 19,104
| 5.343043
| 0.12
| 0.027342
| 0.01538
| 0.029945
| 0.834405
| 0.807796
| 0.791765
| 0.781105
| 0.770038
| 0.752787
| 0
| 0.008109
| 0.238327
| 19,104
| 507
| 81
| 37.680473
| 0.836437
| 0.25513
| 0
| 0.815126
| 0
| 0
| 0.043892
| 0.005405
| 0
| 0
| 0
| 0
| 0
| 1
| 0.033613
| false
| 0
| 0.011204
| 0
| 0.070028
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
13d2950d9fc78300debc4dd00cf64c7838a2403d
| 25,591
|
py
|
Python
|
Game_alpha_0.06.py
|
artem-bondar/simple-walking-game
|
83e6cfb3a91039dde9230c1b799434d0b90be3ee
|
[
"Unlicense"
] | null | null | null |
Game_alpha_0.06.py
|
artem-bondar/simple-walking-game
|
83e6cfb3a91039dde9230c1b799434d0b90be3ee
|
[
"Unlicense"
] | null | null | null |
Game_alpha_0.06.py
|
artem-bondar/simple-walking-game
|
83e6cfb3a91039dde9230c1b799434d0b90be3ee
|
[
"Unlicense"
] | null | null | null |
from tkinter import *

root = Tk()

# Global state.
# hero_position = [map id, x (column), y (row), facing direction 'N'/'E'/'S'/'W'].
hero_position = [0,0,0,'N']
HeroUp = PhotoImage(file="textures/HeroUp.png")
HeroDown = PhotoImage(file="textures/HeroDown.png")
HeroLeft = PhotoImage(file="textures/HeroLeft.png")
HeroRight = PhotoImage(file="textures/HeroRight.png")
# Facing direction -> hero sprite drawn for it.
HeroPositionImage = {'N':HeroUp,'E':HeroRight,'S':HeroDown,'W':HeroLeft}
t000 = PhotoImage(file="textures/empty.png")
t001 = PhotoImage(file="textures/grass.png")
t002 = PhotoImage(file="textures/grass2.png")
t003 = PhotoImage(file="textures/grass3.png")
t004 = PhotoImage(file="textures/grass4.png")
t005 = PhotoImage(file="textures/sand.png")
t006 = PhotoImage(file="textures/needles.png")
t007 = PhotoImage(file="textures/water.png")
t008 = PhotoImage(file="textures/water2.png")
t009 = PhotoImage(file="textures/water3.png")
t010 = PhotoImage(file="textures/stone.png")
t011 = PhotoImage(file="textures/stone2.png")
t012 = PhotoImage(file="textures/stone3.png")
t013 = PhotoImage(file="textures/snow.png")
t014 = PhotoImage(file="textures/moss.png")
t015 = PhotoImage(file="textures/brick.png")
t016 = PhotoImage(file="textures/board.png")
t017 = PhotoImage(file="textures/cheese.png")
t018 = PhotoImage(file="textures/black.png")
# Texture id (as used in maps/map*.txt) -> tile image.
# Fix: the original literal listed the '005' key twice; the redundant
# duplicate entry is removed (same key, same value, so behavior is unchanged).
all_textures = {'000':t000,'001':t001,'002':t002,'003':t003,'004':t004,'005':t005,'006':t006,'007':t007,'008':t008,'009':t009,'010':t010,'011':t011,'012':t012,'013':t013,'014':t014,'015':t015,'016':t016,'017':t017,'018':t018}
# Tiles the hero is allowed to step on.
walkable_textures = [t000,t001,t002,t003,t004,t005,t010,t011,t013,t014,t016,t017]

# Classes
class map_generator:
    """A 17x17 tile map: a terrain layer plus an overlay sprite layer.

    Each cell of `self.sprites` is a `(image, entity)` pair where `entity`
    is an interactive object (teleporter/ifrit) or None.  Relies on the
    module globals `all_textures` / `all_sprites` being populated before
    instances are created.
    """

    def __init__(self):
        self.id = 0
        # Neighbouring map ids in order [north, east, south, west]; 0 = none.
        self.neighbours = [0,0,0,0]
        self.name = ""
        self.description = ""
        self.map = []
        self.sprites = []
        for i in range(17):
            self.map.append([])
            self.sprites.append([])
            for j in range(17):
                self.map[i].append(all_textures['000'])
                self.sprites[i].append((all_sprites['000'],None))

    def render(self,window):
        """Redraws this map on a fresh canvas and marks it as the current map."""
        global map_window,hero_position
        hero_position[0] = self.id
        map_window.destroy()
        map_window = Canvas(window,width=561,height=561,bd=0)
        map_window.place(x=0,y=0)
        for i in range(17):
            for j in range(17):
                map_window.create_image(18+j*33,18+i*33,image=self.map[i][j])
                # Draw the sprite layer only where something is visible.
                if self.sprites[i][j] != (s000,None):
                    if self.sprites[i][j][1] != None:
                        map_window.create_image(18+j*33,18+i*33,image=self.sprites[i][j][0])
                    elif self.sprites[i][j][0] != t000:
                        map_window.create_image(18+j*33,18+i*33,image=self.sprites[i][j][0])

    def load_textures(self,texture_pack):
        """Fills the terrain layer from a 17x17 grid of texture-id strings."""
        for i in range(17):
            for j in range(17):
                self.map[i][j] = all_textures[texture_pack[i][j]]

    def load_sprites(self,sprite_pack):
        """Fills the sprite layer from a 17x17 grid of id strings or entity objects."""
        for i in range(17):
            for j in range(17):
                if type(sprite_pack[i][j]) != str:
                    # Interactive entity: show its texture and keep a reference.
                    self.sprites[i][j] = (sprite_pack[i][j].texture,sprite_pack[i][j])
                else:
                    self.sprites[i][j] = (all_sprites[sprite_pack[i][j]],None)

    def load_map(self):
        """Loads the terrain layer from maps/map<id>.txt.

        Fix: uses a context manager so the file is closed even if a row fails
        to parse (the original called close() only on the success path).
        """
        with open("maps/map"+str(self.id)+".txt",'r') as txt:
            for i in range(17):
                self.map[i] = txt.readline().split()
                for j in range(17):
                    self.map[i][j] = all_textures[self.map[i][j]]
class teleporter:
    """An interactive sprite that moves the hero to another map/position."""

    def __init__(self,input_texture,input_target_coordinates): # Example input: '000', [0,0,0,'N']
        global all_sprites
        self.texture = all_sprites[input_texture]
        # Destination as [map id, x, y, facing direction].
        self.target_coordinates = input_target_coordinates
        def teleport():
            # Switch to the target map, update the hero position, redraw hero.
            global hero_position,HeroPositionImage,all_maps
            all_maps[self.target_coordinates[0]].render(root)
            for i in range(4):
                hero_position[i] = self.target_coordinates[i]
            map_window.create_image(18+hero_position[1]*33,18+hero_position[2]*33,image=HeroPositionImage[hero_position[3]])
        # Facing a teleporter does nothing; standing on it triggers the jump.
        self.function = lambda: 0
        self.function_on_spot = teleport
class ifrit():
    """A fire-elemental NPC that opens a small Tk dialog window when talked to."""

    def __init__(self,input_texture,name_input):
        global all_sprites
        self.texture = all_sprites[input_texture]
        self.name = name_input
        # Player's name; remembered after the first conversation.
        self.username = ""
        def dialog():
            # Opens the conversation window (bound as self.function).
            def enter(event):
                # Runs when the player submits their name (<Return>).
                def reply():
                    # Handles the selected dialogue option, then shows the answer.
                    rad0.destroy()
                    rad1.destroy()
                    rad2.destroy()
                    rad3.destroy()
                    ok.destroy()
                    if var.get() == 0:
                        info.config(text="-Ну и катись колобком дальше.",anchor=W)
                    elif var.get() == 1:
                        info.config(text="-Слышь, ты с какого района вообще, а? Пшел на.",anchor=W)
                    elif var.get() == 2:
                        info.config(text="*Элементаль промолчал*",anchor=W)
                    elif var.get() == 3:
                        info.config(text="-...",anchor=W)
                # Remember the typed name the first time around.
                if self.username == "":
                    self.username = entry.get()
                    info.config(text="-Что привело тебя в мою обитель, "+entry.get()+"?",anchor=W)
                else:
                    info.config(text="-Что привело тебя в мою обитель, "+self.username+"?",anchor=W)
                entry.destroy()
                var=IntVar()
                var.set(0)
                rad0 = Radiobutton(talk,text="-Так зашел.",variable=var,value=0)
                rad1 = Radiobutton(talk,text="-Пофиг. Есть мобила позвонить?",variable=var,value=1)
                rad2 = Radiobutton(talk,text="-Миссия важная выдана мне.",variable=var,value=2)
                rad3 = Radiobutton(talk,text="*Промолчать*",variable=var,value=3)
                rad0.pack(anchor=W)
                rad1.pack(anchor=W)
                rad2.pack(anchor=W)
                rad3.pack(anchor=W)
                ok = Button(talk,text="OK",command=reply)
                ok.pack()
            talk = Toplevel(root)
            talk.focus()
            info = Label(talk,text="-Я могучий и древнейший элементаль огня, именуемый "+self.name+".\n-Каково же твое имя, странник?",fg='red')
            info.pack()
            entry = Entry(talk)
            entry.pack()
            # Skip the name prompt if we already know the player.
            if self.username != "":
                enter(1)
            talk.bind("<Return>",enter)
        self.function = dialog
# Class-dependent globals: sprites.
s000 = PhotoImage(file="sprites/empty.png")
s001 = PhotoImage(file="sprites/manhole.png")
s002 = PhotoImage(file="sprites/ledder.png")
s003 = PhotoImage(file="sprites/fire elementile.png")
# Sprite id -> sprite image.
all_sprites = {'000':s000,'001':s001,'002':s002,'003':s003}
# Sprites the hero may stand on.
walkable_sprites = [s000,s001,s002]
# Interactive entities placed on the maps below.
manhole1 = teleporter('001',[10,2,14,'N'])
manhole2 = teleporter('001',[10,14,5,'S'])
ledder1 = teleporter('002',[1,5,1,'N'])
ledder2 = teleporter('002',[2,12,13,'S'])
fire = ifrit('003','Lambda Velorum')
# 17x17 sprite layouts; '000' is an empty cell, objects are interactive entities.
sprite_pack1=[
['000', '000', fire, '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000',manhole1, '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000']]
sprite_pack2=[
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', manhole2, '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000']]
sprite_pack10=[
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', ledder2, '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', ledder1, '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000']]
sprite_pack11=[
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', fire, '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000'],
['000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000', '000']]
# Map instances built from the classes above.
map1=map_generator()
map1.id=1
map1.name='Start'
map1.neighbours=[2,3,4,5]
map1.load_map()
map1.load_sprites(sprite_pack1)
map2=map_generator()
map2.id=2
map2.name='2-test'
map2.neighbours=[0,7,1,6]
map2.load_map()
map2.load_sprites(sprite_pack2)
map3=map_generator()
map3.id=3
map3.name='3-test'
map3.neighbours=[7,0,8,1]
map3.load_map()
map4=map_generator()
map4.id=4
map4.name='4-test'
map4.neighbours=[1,8,0,9]
map4.load_map()
map5=map_generator()
map5.id=5
map5.name='5-test'
map5.neighbours=[6,1,9,0]
map5.load_map()
map6=map_generator()
map6.id=6
map6.name='6-test'
map6.neighbours=[0,2,5,0]
map6.load_map()
map7=map_generator()
map7.id=7
map7.name='7-test'
map7.neighbours=[0,0,3,2]
map7.load_map()
map8=map_generator()
map8.id=8
map8.name='8-test'
map8.neighbours=[3,0,0,4]
map8.load_map()
map9=map_generator()
map9.id=9
map9.name='9-test'
map9.neighbours=[5,4,0,0]
map9.load_map()
map10=map_generator()
map10.id=10
map10.name='Canalisation'
map10.neighbours=[11,0,0,0]
map10.load_map()
map10.load_sprites(sprite_pack10)
map11=map_generator()
map11.id=11
map11.name='Canalisation 2'
map11.neighbours=[0,0,10,0]
map11.load_map()
map11.load_sprites(sprite_pack11)
# Map id -> map instance lookup used by movement/interaction handlers.
all_maps = {map1.id:map1,map2.id:map2,map3.id:map3,map4.id:map4,map5.id:map5,map6.id:map6,map7.id:map7,map8.id:map8,map9.id:map9,map10.id:map10,map11.id:map11}
# The hero starts on map 1 at the top-left corner, facing north.
hero_position = [1,0,0,'N']
# Functions
def interact(event):
    """Triggers the entity the hero is facing, or the one under his feet."""
    global hero_position
    map_id, col, row, facing = hero_position
    sprites = all_maps[map_id].sprites
    # Grid offset (row delta, column delta) for each facing direction.
    offsets = {'N': (-1, 0), 'S': (1, 0), 'W': (0, -1), 'E': (0, 1)}
    target = None
    delta = offsets.get(facing)
    if delta is not None:
        target_row = row + delta[0]
        target_col = col + delta[1]
        if 0 <= target_row <= 16 and 0 <= target_col <= 16:
            target = sprites[target_row][target_col][1]
    if target is not None:
        # An interactive entity is directly ahead: talk/use it.
        target.function()
    elif sprites[row][col][1] is not None:
        # Otherwise, activate whatever the hero is standing on.
        sprites[row][col][1].function_on_spot()
def move_down(event):
    """Turns the hero south, or steps one tile south (crossing maps at the edge)."""
    global hero_position,HeroPositionImage,all_maps,all_sprites
    if hero_position[3] != 'S':
        # First press only turns in place: redraw tile, sprite layer, then hero.
        hero_position[3] = 'S'
        map_window.create_image(18+hero_position[1]*33,18+hero_position[2]*33,image=all_maps[hero_position[0]].map[hero_position[2]][hero_position[1]])
        map_window.create_image(18+hero_position[1]*33,18+hero_position[2]*33,image=all_maps[hero_position[0]].sprites[hero_position[2]][hero_position[1]][0])
        map_window.create_image(18+hero_position[1]*33,18+hero_position[2]*33,image=HeroPositionImage['S'])
    else:
        # Step south when the target tile's texture and sprite are both walkable.
        if hero_position[2] != 16 and all_maps[hero_position[0]].map[hero_position[2]+1][hero_position[1]] in walkable_textures and all_maps[hero_position[0]].sprites[hero_position[2]+1][hero_position[1]][0] in walkable_sprites:
            map_window.create_image(18+hero_position[1]*33,18+hero_position[2]*33,image=all_maps[hero_position[0]].map[hero_position[2]][hero_position[1]])
            map_window.create_image(18+hero_position[1]*33,18+hero_position[2]*33,image=all_maps[hero_position[0]].sprites[hero_position[2]][hero_position[1]][0])
            hero_position[2]+=1
            map_window.create_image(18+hero_position[1]*33,18+hero_position[2]*33,image=HeroPositionImage['S'])
        elif hero_position[2] == 16:
            # At the bottom edge: cross into the southern neighbour map, if any.
            if all_maps[hero_position[0]].neighbours[2] != 0 and all_maps[all_maps[hero_position[0]].neighbours[2]].map[0][hero_position[1]] in walkable_textures and all_maps[all_maps[hero_position[0]].neighbours[2]].sprites[0][hero_position[1]][0] in walkable_sprites:
                hero_position[2] = 0
                all_maps[all_maps[hero_position[0]].neighbours[2]].render(root)
                map_window.create_image(18+hero_position[1]*33,18+hero_position[2]*33,image=HeroPositionImage['S'])
def move_up(event):
    """Turns the hero north, or steps one tile north (crossing maps at the edge)."""
    global hero_position,HeroPositionImage,all_maps,all_sprites
    if hero_position[3] != 'N':
        # First press only turns in place: redraw tile, sprite layer, then hero.
        hero_position[3] = 'N'
        map_window.create_image(18+hero_position[1]*33,18+hero_position[2]*33,image=all_maps[hero_position[0]].map[hero_position[2]][hero_position[1]])
        map_window.create_image(18+hero_position[1]*33,18+hero_position[2]*33,image=all_maps[hero_position[0]].sprites[hero_position[2]][hero_position[1]][0])
        map_window.create_image(18+hero_position[1]*33,18+hero_position[2]*33,image=HeroPositionImage['N'])
    else:
        # Step north when the target tile's texture and sprite are both walkable.
        if hero_position[2] != 0 and all_maps[hero_position[0]].map[hero_position[2]-1][hero_position[1]] in walkable_textures and all_maps[hero_position[0]].sprites[hero_position[2]-1][hero_position[1]][0] in walkable_sprites:
            map_window.create_image(18+hero_position[1]*33,18+hero_position[2]*33,image=all_maps[hero_position[0]].map[hero_position[2]][hero_position[1]])
            map_window.create_image(18+hero_position[1]*33,18+hero_position[2]*33,image=all_maps[hero_position[0]].sprites[hero_position[2]][hero_position[1]][0])
            hero_position[2]-=1
            map_window.create_image(18+hero_position[1]*33,18+hero_position[2]*33,image=HeroPositionImage['N'])
        elif hero_position[2] == 0:
            # At the top edge: cross into the northern neighbour map, if any.
            if all_maps[hero_position[0]].neighbours[0] != 0 and all_maps[all_maps[hero_position[0]].neighbours[0]].map[16][hero_position[1]] in walkable_textures and all_maps[all_maps[hero_position[0]].neighbours[0]].sprites[16][hero_position[1]][0] in walkable_sprites:
                hero_position[2] = 16
                all_maps[all_maps[hero_position[0]].neighbours[0]].render(root)
                map_window.create_image(18+hero_position[1]*33,18+hero_position[2]*33,image=HeroPositionImage['N'])
def move_right(event):
    """Turns the hero east, or steps one tile east (crossing maps at the edge)."""
    global hero_position,HeroPositionImage,all_maps,all_sprites
    if hero_position[3] != 'E':
        # First press only turns in place: redraw tile, sprite layer, then hero.
        hero_position[3] = 'E'
        map_window.create_image(18+hero_position[1]*33,18+hero_position[2]*33,image=all_maps[hero_position[0]].map[hero_position[2]][hero_position[1]])
        map_window.create_image(18+hero_position[1]*33,18+hero_position[2]*33,image=all_maps[hero_position[0]].sprites[hero_position[2]][hero_position[1]][0])
        map_window.create_image(18+hero_position[1]*33,18+hero_position[2]*33,image=HeroPositionImage['E'])
    else:
        # Step east when the target tile's texture and sprite are both walkable.
        if hero_position[1] != 16 and all_maps[hero_position[0]].map[hero_position[2]][hero_position[1]+1] in walkable_textures and all_maps[hero_position[0]].sprites[hero_position[2]][hero_position[1]+1][0] in walkable_sprites:
            map_window.create_image(18+hero_position[1]*33,18+hero_position[2]*33,image=all_maps[hero_position[0]].map[hero_position[2]][hero_position[1]])
            map_window.create_image(18+hero_position[1]*33,18+hero_position[2]*33,image=all_maps[hero_position[0]].sprites[hero_position[2]][hero_position[1]][0])
            hero_position[1]+=1
            map_window.create_image(18+hero_position[1]*33,18+hero_position[2]*33,image=HeroPositionImage['E'])
        elif hero_position[1] == 16:
            # At the right edge: cross into the eastern neighbour map, if any.
            if all_maps[hero_position[0]].neighbours[1] != 0 and all_maps[all_maps[hero_position[0]].neighbours[1]].map[hero_position[2]][0] in walkable_textures and all_maps[all_maps[hero_position[0]].neighbours[1]].sprites[hero_position[2]][0][0] in walkable_sprites:
                hero_position[1] = 0
                all_maps[all_maps[hero_position[0]].neighbours[1]].render(root)
                map_window.create_image(18+hero_position[1]*33,18+hero_position[2]*33,image=HeroPositionImage['E'])
def move_left(event):
    """Turns the hero west, or steps one tile west (crossing maps at the edge)."""
    global hero_position,HeroPositionImage,all_maps,all_sprites
    if hero_position[3] != 'W':
        # First press only turns in place: redraw tile, sprite layer, then hero.
        hero_position[3] = 'W'
        map_window.create_image(18+hero_position[1]*33,18+hero_position[2]*33,image=all_maps[hero_position[0]].map[hero_position[2]][hero_position[1]])
        map_window.create_image(18+hero_position[1]*33,18+hero_position[2]*33,image=all_maps[hero_position[0]].sprites[hero_position[2]][hero_position[1]][0])
        map_window.create_image(18+hero_position[1]*33,18+hero_position[2]*33,image=HeroPositionImage['W'])
    else:
        # Step west when the target tile's texture and sprite are both walkable.
        if hero_position[1] != 0 and all_maps[hero_position[0]].map[hero_position[2]][hero_position[1]-1] in walkable_textures and all_maps[hero_position[0]].sprites[hero_position[2]][hero_position[1]-1][0] in walkable_sprites:
            map_window.create_image(18+hero_position[1]*33,18+hero_position[2]*33,image=all_maps[hero_position[0]].map[hero_position[2]][hero_position[1]])
            map_window.create_image(18+hero_position[1]*33,18+hero_position[2]*33,image=all_maps[hero_position[0]].sprites[hero_position[2]][hero_position[1]][0])
            hero_position[1]-=1
            map_window.create_image(18+hero_position[1]*33,18+hero_position[2]*33,image=HeroPositionImage['W'])
        elif hero_position[1] == 0:
            # At the left edge: cross into the western neighbour map, if any.
            if all_maps[hero_position[0]].neighbours[3] != 0 and all_maps[all_maps[hero_position[0]].neighbours[3]].map[hero_position[2]][16] in walkable_textures and all_maps[all_maps[hero_position[0]].neighbours[3]].sprites[hero_position[2]][16][0] in walkable_sprites:
                hero_position[1] = 16
                all_maps[all_maps[hero_position[0]].neighbours[3]].render(root)
                map_window.create_image(18+hero_position[1]*33,18+hero_position[2]*33,image=HeroPositionImage['W'])
# Tkinter setup: window, key bindings, first render, main loop.
root.geometry('565x565')
root.bind("<Return>",interact)
root.bind("<Down>",move_down)
root.bind("<Up>",move_up)
root.bind("<Right>",move_right)
root.bind("<Left>",move_left)
map_window = Canvas(root,width=561,height=561,bd=0)
map1.render(root)
map_window.create_image(18+hero_position[1]*33,18+hero_position[2]*33,image=HeroPositionImage['N'])
root.mainloop()
| 78.021341
| 2,075
| 0.58798
| 3,779
| 25,591
| 3.8534
| 0.074623
| 0.469716
| 0.698393
| 0.923774
| 0.672847
| 0.656847
| 0.654031
| 0.642769
| 0.632125
| 0.615918
| 0
| 0.214934
| 0.160056
| 25,591
| 327
| 2,076
| 78.259939
| 0.462526
| 0.005588
| 0
| 0.178571
| 0
| 0
| 0.180024
| 0.002516
| 0
| 0
| 0
| 0
| 0
| 1
| 0.051948
| false
| 0
| 0.003247
| 0
| 0.064935
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
13df6f13ec523138df0ccfeca2d2289950179536
| 5,432
|
py
|
Python
|
modules/cli/helpers.py
|
anouarbensaad/VulnX
|
2f9f46e59f28ceb6d5cf3bfacd9810d43b4b25cb
|
[
"MIT"
] | 10
|
2019-05-10T04:43:54.000Z
|
2019-05-16T00:45:46.000Z
|
modules/cli/helpers.py
|
anouarbensaad/VulnX
|
2f9f46e59f28ceb6d5cf3bfacd9810d43b4b25cb
|
[
"MIT"
] | null | null | null |
modules/cli/helpers.py
|
anouarbensaad/VulnX
|
2f9f46e59f28ceb6d5cf3bfacd9810d43b4b25cb
|
[
"MIT"
] | 8
|
2019-05-13T04:25:11.000Z
|
2019-05-15T02:47:49.000Z
|
class Helpers():
    @staticmethod
    def _general_help():
        """Prints the command reference for the top-level vulnx interactive mode."""
        print("""
Command Description
-------- -------------
help/? Show this help menu.
clear/cls clear the vulnx screen
use <Variable> Use an variable.
info <Variable> Get information about an available variable.
set <variable> <value> Sets a context-specific variable to a value to use while using vulnx.
variables Prints all previously specified variables.
banner Display banner.
history Display command-line most important history from the beginning.
makerc Save command-line history to a file.
exec <command> Execute a system command without closing the vulnx-mode
exit/quit Exit the vulnx-mode
""")
    @staticmethod
    def _url_action_help():
        """Prints the command reference for the URL-target context."""
        print("""
Command Description
-------- -------------
help/? Show this help menu.
timeout set timeout
ports scan ports
domain get domains & sub domains
cms info get cms info (version , user ..)
web info get web info
dump dns dump dns get sub domains [mx-server..]
run exploit run exploits corresponding to cms
clear/cls clear the vulnx screen
history Display command-line most important history from the beginning.
variables Prints all previously specified variables.
back move back from current context
""")
# dorks - command helpers.
@staticmethod
def _dorks_action_help():
print("""
Command Description
-------- -------------
help/? Show this help menu.
list list dorks
set dork set exploit name
clear/cls clear the vulnx screen
history Display command-line most important history from the beginning.
variables Prints all previously specified variables.
exec <command> Execute a system command without closing the vulnx-mode
back move back from current context
""")
@staticmethod
def _dorks_setdork_help():
print("""
Command Description
-------- -------------
help/? Show this help menu.
pages set num page
output output file.
run search web with specified dork
clear/cls clear the vulnx screen
history Display command-line most important history from the beginning.
variables Prints all previously specified variables.
exec <command> Execute a system command without closing the vulnx-mode
back move back from current context
""")
@staticmethod
def _dorks_setdork_page_help():
print("""
Command Description
-------- -------------
help/? Show this help menu.
output output file.
run search web with specified dork
clear/cls clear the vulnx screen
exec <command> Execute a system command without closing the vulnx-mode
history Display command-line most important history from the beginning.
variables Prints all previously specified variables.
back move back from current context
""")
@staticmethod
def _dorks_setdork_output_help():
print("""
Command Description
-------- -------------
help/? Show this help menu.
pages set num page
run search web with specified dork
exec <command> Execute a system command without closing the vulnx-mode
clear/cls clear the vulnx screen
history Display command-line most important history from the beginning.
variables Prints all previously specified variables.
back move back from current context
""")
@staticmethod
def _dorks_setdork_page_output_help():
print("""
Command Description
-------- -------------
help/? Show this help menu.
run search web with specified dork
clear/cls clear the vulnx screen
exec <command> Execute a system command without closing the vulnx-mode
history Display command-line most important history from the beginning.
variables Prints all previously specified variables.
back move back from current context
""")
| 46.827586
| 101
| 0.490059
| 486
| 5,432
| 5.427984
| 0.1893
| 0.042456
| 0.042456
| 0.071645
| 0.800986
| 0.800986
| 0.76232
| 0.76232
| 0.76232
| 0.726687
| 0
| 0
| 0.448454
| 5,432
| 115
| 102
| 47.234783
| 0.880507
| 0.004418
| 0
| 0.783019
| 0
| 0.009434
| 0.899926
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.066038
| true
| 0
| 0.066038
| 0
| 0.141509
| 0.066038
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
13f8c8490d864bc990046035fa997bc826bc005b
| 152
|
py
|
Python
|
flash/tabular/regression/__init__.py
|
Actis92/lightning-flash
|
49972268cfc0f95f1bd2b8fbf25036970cc44b59
|
[
"Apache-2.0"
] | 1,457
|
2021-01-28T20:40:16.000Z
|
2022-03-31T06:22:05.000Z
|
flash/tabular/regression/__init__.py
|
Actis92/lightning-flash
|
49972268cfc0f95f1bd2b8fbf25036970cc44b59
|
[
"Apache-2.0"
] | 1,123
|
2021-01-28T20:37:56.000Z
|
2022-03-31T19:34:44.000Z
|
flash/tabular/regression/__init__.py
|
Actis92/lightning-flash
|
49972268cfc0f95f1bd2b8fbf25036970cc44b59
|
[
"Apache-2.0"
] | 170
|
2021-01-29T00:41:39.000Z
|
2022-03-29T16:09:52.000Z
|
from flash.tabular.regression.data import TabularRegressionData # noqa: F401
from flash.tabular.regression.model import TabularRegressor # noqa: F401
| 50.666667
| 77
| 0.828947
| 18
| 152
| 7
| 0.611111
| 0.142857
| 0.253968
| 0.412698
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.044118
| 0.105263
| 152
| 2
| 78
| 76
| 0.882353
| 0.138158
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b917dc26ea37dc7f08d2c3bd1fa8e749878da59d
| 2,350
|
py
|
Python
|
api/tests/tests_api_utils.py
|
Nels885/csd_dashboard
|
aa5a3b970c50a2a93af722f962bd87c3728f233c
|
[
"MIT"
] | null | null | null |
api/tests/tests_api_utils.py
|
Nels885/csd_dashboard
|
aa5a3b970c50a2a93af722f962bd87c3728f233c
|
[
"MIT"
] | null | null | null |
api/tests/tests_api_utils.py
|
Nels885/csd_dashboard
|
aa5a3b970c50a2a93af722f962bd87c3728f233c
|
[
"MIT"
] | null | null | null |
from dashboard.tests.base import UnitTest
from tools.models import ThermalChamber
from ..utils import thermal_chamber_use
class ApiUtilsTestCase(UnitTest):
    """Unit tests for the ``thermal_chamber_use`` API helper."""

    def setUp(self):
        super(ApiUtilsTestCase, self).setUp()
        # One chamber per operating mode, both owned by the test user.
        ThermalChamber.objects.create(operating_mode="FROID", xelon_number="A123456789", created_by=self.user)
        ThermalChamber.objects.create(operating_mode="CHAUD", xelon_number="A987654321", created_by=self.user)

    def test_thermal_chamber_use_hot(self):
        """A hot reading starts the CHAUD chamber; a mild one stops it."""
        thermal_chamber_use("41°C")
        started = ThermalChamber.objects.filter(start_time__isnull=False, stop_time__isnull=True)
        self.assertEqual(len(started), 1)
        for chamber in started:
            self.assertEqual(chamber.operating_mode, "CHAUD")
        thermal_chamber_use("25°C")
        stopped = ThermalChamber.objects.filter(start_time__isnull=False, stop_time__isnull=False, active=False)
        self.assertEqual(len(stopped), 1)
        for chamber in stopped:
            self.assertEqual(chamber.operating_mode, "CHAUD")

    def test_thermal_chamber_use_freeze(self):
        """A sub-zero reading starts the FROID chamber; a mild one stops it."""
        thermal_chamber_use("-1°C")
        started = ThermalChamber.objects.filter(start_time__isnull=False, stop_time__isnull=True)
        self.assertEqual(len(started), 1)
        for chamber in started:
            self.assertEqual(chamber.operating_mode, "FROID")
        thermal_chamber_use("25°C")
        stopped = ThermalChamber.objects.filter(start_time__isnull=False, stop_time__isnull=False, active=False)
        self.assertEqual(len(stopped), 1)
        for chamber in stopped:
            self.assertEqual(chamber.operating_mode, "FROID")

    def test_thermal_chamber_use_hors_ligne(self):
        """An 'Hors ligne' (offline) reading stops whichever chamber runs."""
        thermal_chamber_use("41°C")
        thermal_chamber_use("Hors ligne")
        stopped = ThermalChamber.objects.filter(start_time__isnull=False, stop_time__isnull=False, active=False)
        self.assertEqual(len(stopped), 1)
        thermal_chamber_use("-20°C")
        thermal_chamber_use("Hors ligne")
        stopped = ThermalChamber.objects.filter(start_time__isnull=False, stop_time__isnull=False, active=False)
        self.assertEqual(len(stopped), 2)
| 40.517241
| 113
| 0.698723
| 287
| 2,350
| 5.473868
| 0.202091
| 0.106938
| 0.129854
| 0.106938
| 0.812858
| 0.716104
| 0.716104
| 0.716104
| 0.716104
| 0.703374
| 0
| 0.018737
| 0.205106
| 2,350
| 57
| 114
| 41.22807
| 0.819058
| 0
| 0
| 0.702128
| 0
| 0
| 0.040426
| 0
| 0
| 0
| 0
| 0
| 0.212766
| 1
| 0.085106
| false
| 0
| 0.06383
| 0
| 0.170213
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b972490af82d6e6b93e85b0091c66f94d23873fd
| 1,987
|
py
|
Python
|
catalyst_rl/utils/tests/test_metric_manager.py
|
rhololkeolke/catalyst-rl
|
ec18ff4a58b6d00652f772231db8de86debb4b3d
|
[
"Apache-2.0"
] | 46
|
2020-03-27T20:12:32.000Z
|
2021-11-21T19:08:51.000Z
|
catalyst_rl/utils/tests/test_metric_manager.py
|
rhololkeolke/catalyst-rl
|
ec18ff4a58b6d00652f772231db8de86debb4b3d
|
[
"Apache-2.0"
] | 2
|
2020-04-06T10:43:04.000Z
|
2020-07-01T18:26:10.000Z
|
catalyst_rl/utils/tests/test_metric_manager.py
|
rhololkeolke/catalyst-rl
|
ec18ff4a58b6d00652f772231db8de86debb4b3d
|
[
"Apache-2.0"
] | 5
|
2020-04-17T14:09:53.000Z
|
2021-05-10T08:58:29.000Z
|
import numpy as np
from catalyst_rl.utils import tools
def test_to_value():
    """_to_single_value should accept a numpy scalar without raising."""
    sample = np.float32(1.0)
    tools.MetricManager._to_single_value(sample)
def test_epoch_metrics():
    """Per-loader epoch values are the mean of the batch values fed in."""
    mm = tools.MetricManager("valid", "test", True)
    mm.begin_epoch()
    # Feed two batches into each loader; same call sequence as before,
    # just driven by data instead of being written out longhand.
    for loader_name, batch_values in (("train", (2, 2)), ("valid", (1, 0))):
        mm.begin_loader(loader_name)
        for value in batch_values:
            mm.begin_batch()
            mm.add_batch_value("test", value)
            mm.end_batch()
        mm.end_loader()
    mm.end_epoch_train()
    assert mm.epoch_values["valid"]["test"] == 0.5
    assert mm.epoch_values["train"]["test"] == 2
def test_best():
    """is_best tracks the main metric in the requested direction."""
    def run_epoch(manager, value):
        # One epoch consisting of a single "valid" batch carrying `value`.
        manager.begin_epoch()
        manager.begin_loader("valid")
        manager.begin_batch()
        manager.add_batch_value("test", value)
        manager.end_batch()
        manager.end_loader()
        manager.end_epoch_train()

    # minimize=True: the second (lower) epoch is the new best.
    minimizing = tools.MetricManager("valid", "test", True)
    run_epoch(minimizing, 1)
    run_epoch(minimizing, 0)
    assert minimizing.is_best
    assert minimizing.best_main_metric_value == 0

    # minimize=False: the lower second epoch is NOT an improvement.
    maximizing = tools.MetricManager("valid", "test", False)
    run_epoch(maximizing, 1)
    run_epoch(maximizing, 0)
    assert not maximizing.is_best
    assert maximizing.best_main_metric_value == 1
| 21.597826
| 57
| 0.691998
| 259
| 1,987
| 5.007722
| 0.146718
| 0.17579
| 0.104857
| 0.148034
| 0.844256
| 0.818042
| 0.818042
| 0.811103
| 0.811103
| 0.747109
| 0
| 0.010442
| 0.180674
| 1,987
| 91
| 58
| 21.835165
| 0.786241
| 0
| 0
| 0.770492
| 0
| 0
| 0.05385
| 0
| 0
| 0
| 0
| 0
| 0.098361
| 1
| 0.04918
| false
| 0
| 0.032787
| 0
| 0.081967
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b9a3f4e147d29f780193a84bcfca5e7ea2b4b680
| 31,176
|
py
|
Python
|
azure-mgmt-web/azure/mgmt/web/operations/recommendations_operations.py
|
NMijat1024/azure-sdk-for-python
|
c49e1d6d797dceaca81813cafb1a486d67185182
|
[
"MIT"
] | null | null | null |
azure-mgmt-web/azure/mgmt/web/operations/recommendations_operations.py
|
NMijat1024/azure-sdk-for-python
|
c49e1d6d797dceaca81813cafb1a486d67185182
|
[
"MIT"
] | 1
|
2018-11-29T14:46:42.000Z
|
2018-11-29T14:46:42.000Z
|
azure-mgmt-web/azure/mgmt/web/operations/recommendations_operations.py
|
NMijat1024/azure-sdk-for-python
|
c49e1d6d797dceaca81813cafb1a486d67185182
|
[
"MIT"
] | 1
|
2018-08-28T14:36:47.000Z
|
2018-08-28T14:36:47.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
import uuid
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
from .. import models
class RecommendationsOperations(object):
"""RecommendationsOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
:ivar api_version: API Version. Constant value: "2018-02-01".
"""
models = models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.api_version = "2018-02-01"
self.config = config
# NOTE: generated autorest code; `list`/`filter` shadow builtins by design of
# the service API surface — do not rename.
def list(
self, featured=None, filter=None, custom_headers=None, raw=False, **operation_config):
"""List all recommendations for a subscription.
List all recommendations for a subscription.
:param featured: Specify <code>true</code> to return only the most
critical recommendations. The default is <code>false</code>, which
returns all recommendations.
:type featured: bool
:param filter: Filter is specified by using OData syntax. Example:
$filter=channel eq 'Api' or channel eq 'Notification' and startTime eq
2014-01-01T00:00:00Z and endTime eq 2014-12-31T23:59:59Z and timeGrain
eq duration'[PT1H|PT1M|P1D]
:type filter: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of Recommendation
:rtype:
~azure.mgmt.web.models.RecommendationPaged[~azure.mgmt.web.models.Recommendation]
:raises:
:class:`DefaultErrorResponseException<azure.mgmt.web.models.DefaultErrorResponseException>`
"""
# Page fetcher handed to RecommendationPaged: the first call (no next_link)
# builds the full URL and query string; follow-up calls just GET next_link.
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = self.list.metadata['url']
path_format_arguments = {
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
if featured is not None:
query_parameters['featured'] = self._serialize.query("featured", featured, 'bool')
if filter is not None:
# skip_quote=True: the caller supplies a raw OData expression.
query_parameters['$filter'] = self._serialize.query("filter", filter, 'str', skip_quote=True)
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
# Anything other than 200 OK is surfaced as a typed service error.
if response.status_code not in [200]:
raise models.DefaultErrorResponseException(self._deserialize, response)
return response
# Deserialize response
deserialized = models.RecommendationPaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.RecommendationPaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
# URL template consumed by internal_paging above (autorest convention:
# attached as function metadata).
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Web/recommendations'}
def reset_all_filters(
self, custom_headers=None, raw=False, **operation_config):
"""Reset all recommendation opt-out settings for a subscription.
Reset all recommendation opt-out settings for a subscription.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: None or ClientRawResponse if raw=true
:rtype: None or ~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = self.reset_all_filters.metadata['url']
path_format_arguments = {
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
# Service returns 204 No Content on success; anything else -> CloudError.
if response.status_code not in [204]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
# raw=True: hand back the transport response wrapper instead of None.
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
# URL template consumed above (autorest convention: function metadata).
reset_all_filters.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Web/recommendations/reset'}
def disable_recommendation_for_subscription(
self, name, custom_headers=None, raw=False, **operation_config):
"""Disables the specified rule so it will not apply to a subscription in
the future.
Disables the specified rule so it will not apply to a subscription in
the future.
:param name: Rule name
:type name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: None or ClientRawResponse if raw=true
:rtype: None or ~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = self.disable_recommendation_for_subscription.metadata['url']
path_format_arguments = {
'name': self._serialize.url("name", name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
# This POST expects 200 OK; anything else is surfaced as CloudError.
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
# raw=True: hand back the transport response wrapper instead of None.
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
# URL template consumed above (autorest convention: function metadata).
disable_recommendation_for_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Web/recommendations/{name}/disable'}
def list_history_for_web_app(
self, resource_group_name, site_name, expired_only=None, filter=None, custom_headers=None, raw=False, **operation_config):
"""Get past recommendations for an app, optionally specified by the time
range.
Get past recommendations for an app, optionally specified by the time
range.
:param resource_group_name: Name of the resource group to which the
resource belongs.
:type resource_group_name: str
:param site_name: Name of the app.
:type site_name: str
:param expired_only: Specify <code>false</code> to return all
recommendations. The default is <code>true</code>, which returns only
expired recommendations.
:type expired_only: bool
:param filter: Filter is specified by using OData syntax. Example:
$filter=channel eq 'Api' or channel eq 'Notification' and startTime eq
2014-01-01T00:00:00Z and endTime eq 2014-12-31T23:59:59Z and timeGrain
eq duration'[PT1H|PT1M|P1D]
:type filter: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of Recommendation
:rtype:
~azure.mgmt.web.models.RecommendationPaged[~azure.mgmt.web.models.Recommendation]
:raises:
:class:`DefaultErrorResponseException<azure.mgmt.web.models.DefaultErrorResponseException>`
"""
# Page fetcher handed to RecommendationPaged: the first call (no next_link)
# builds the full URL and query string; follow-up calls just GET next_link.
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = self.list_history_for_web_app.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'siteName': self._serialize.url("site_name", site_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
if expired_only is not None:
query_parameters['expiredOnly'] = self._serialize.query("expired_only", expired_only, 'bool')
if filter is not None:
# skip_quote=True: the caller supplies a raw OData expression.
query_parameters['$filter'] = self._serialize.query("filter", filter, 'str', skip_quote=True)
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
# Anything other than 200 OK is surfaced as a typed service error.
if response.status_code not in [200]:
raise models.DefaultErrorResponseException(self._deserialize, response)
return response
# Deserialize response
deserialized = models.RecommendationPaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.RecommendationPaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
# URL template consumed by internal_paging above (autorest convention).
list_history_for_web_app.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/recommendationHistory'}
def list_recommended_rules_for_web_app(
self, resource_group_name, site_name, featured=None, filter=None, custom_headers=None, raw=False, **operation_config):
"""Get all recommendations for an app.
Get all recommendations for an app.
:param resource_group_name: Name of the resource group to which the
resource belongs.
:type resource_group_name: str
:param site_name: Name of the app.
:type site_name: str
:param featured: Specify <code>true</code> to return only the most
critical recommendations. The default is <code>false</code>, which
returns all recommendations.
:type featured: bool
:param filter: Return only channels specified in the filter. Filter is
specified by using OData syntax. Example: $filter=channel eq 'Api' or
channel eq 'Notification'
:type filter: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of Recommendation
:rtype:
~azure.mgmt.web.models.RecommendationPaged[~azure.mgmt.web.models.Recommendation]
:raises:
:class:`DefaultErrorResponseException<azure.mgmt.web.models.DefaultErrorResponseException>`
"""
# Page fetcher handed to RecommendationPaged: the first call (no next_link)
# builds the full URL and query string; follow-up calls just GET next_link.
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = self.list_recommended_rules_for_web_app.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'siteName': self._serialize.url("site_name", site_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
if featured is not None:
query_parameters['featured'] = self._serialize.query("featured", featured, 'bool')
if filter is not None:
# skip_quote=True: the caller supplies a raw OData expression.
query_parameters['$filter'] = self._serialize.query("filter", filter, 'str', skip_quote=True)
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
# Anything other than 200 OK is surfaced as a typed service error.
if response.status_code not in [200]:
raise models.DefaultErrorResponseException(self._deserialize, response)
return response
# Deserialize response
deserialized = models.RecommendationPaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.RecommendationPaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
# URL template consumed by internal_paging above (autorest convention).
list_recommended_rules_for_web_app.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/recommendations'}
def disable_all_for_web_app(
self, resource_group_name, site_name, custom_headers=None, raw=False, **operation_config):
"""Disable all recommendations for an app.
Disable all recommendations for an app.
:param resource_group_name: Name of the resource group to which the
resource belongs.
:type resource_group_name: str
:param site_name: Name of the app.
:type site_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: None or ClientRawResponse if raw=true
:rtype: None or ~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = self.disable_all_for_web_app.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'siteName': self._serialize.url("site_name", site_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
# Service returns 204 No Content on success; anything else -> CloudError.
if response.status_code not in [204]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
# raw=True: hand back the transport response wrapper instead of None.
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
# URL template consumed above (autorest convention: function metadata).
disable_all_for_web_app.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/recommendations/disable'}
def reset_all_filters_for_web_app(
self, resource_group_name, site_name, custom_headers=None, raw=False, **operation_config):
"""Reset all recommendation opt-out settings for an app.
Reset all recommendation opt-out settings for an app.
:param resource_group_name: Name of the resource group to which the
resource belongs.
:type resource_group_name: str
:param site_name: Name of the app.
:type site_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: None or ClientRawResponse if raw=true
:rtype: None or ~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = self.reset_all_filters_for_web_app.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'siteName': self._serialize.url("site_name", site_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
# Service returns 204 No Content on success; anything else -> CloudError.
if response.status_code not in [204]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
# raw=True: hand back the transport response wrapper instead of None.
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
# URL template consumed above (autorest convention: function metadata).
reset_all_filters_for_web_app.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/recommendations/reset'}
def get_rule_details_by_web_app(
        self, resource_group_name, site_name, name, update_seen=None, recommendation_id=None, custom_headers=None, raw=False, **operation_config):
    """Get a recommendation rule for an app.

    Get a recommendation rule for an app.

    :param resource_group_name: Name of the resource group to which the
     resource belongs.
    :type resource_group_name: str
    :param site_name: Name of the app.
    :type site_name: str
    :param name: Name of the recommendation.
    :type name: str
    :param update_seen: Specify <code>true</code> to update the last-seen
     timestamp of the recommendation object.
    :type update_seen: bool
    :param recommendation_id: The GUID of the recommendation object if you
     query an expired one. You don't need to specify it to query an active
     entry.
    :type recommendation_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: RecommendationRule or ClientRawResponse if raw=true
    :rtype: ~azure.mgmt.web.models.RecommendationRule or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`DefaultErrorResponseException<azure.mgmt.web.models.DefaultErrorResponseException>`
    """
    # Construct URL from the operation's metadata template; path arguments
    # are serialized/validated here (resource group name has length and
    # pattern constraints enforced by the serializer).
    url = self.get_rule_details_by_web_app.metadata['url']
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
        'siteName': self._serialize.url("site_name", site_name, 'str'),
        'name': self._serialize.url("name", name, 'str'),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct query parameters; the optional ones are only sent when the
    # caller supplied them, api-version is always required.
    query_parameters = {}
    if update_seen is not None:
        query_parameters['updateSeen'] = self._serialize.query("update_seen", update_seen, 'bool')
    if recommendation_id is not None:
        query_parameters['recommendationId'] = self._serialize.query("recommendation_id", recommendation_id, 'str')
    query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Accept'] = 'application/json'
    if self.config.generate_client_request_id:
        # Per-call correlation id used for client-side request tracing.
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct and send request
    request = self._client.get(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)

    # 200 is the only documented success code for this operation.
    if response.status_code not in [200]:
        raise models.DefaultErrorResponseException(self._deserialize, response)

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('RecommendationRule', response)

    if raw:
        # Caller requested the transport-level response alongside the model.
        client_raw_response = ClientRawResponse(deserialized, response)
        return client_raw_response

    return deserialized
get_rule_details_by_web_app.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/recommendations/{name}'}
def disable_recommendation_for_site(
        self, resource_group_name, site_name, name, custom_headers=None, raw=False, **operation_config):
    """Disables the specific rule for a web site permanently.

    Disables the specific rule for a web site permanently.

    :param resource_group_name: Name of the resource group to which the
     resource belongs.
    :type resource_group_name: str
    :param site_name: Site name
    :type site_name: str
    :param name: Rule name
    :type name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: None or ClientRawResponse if raw=true
    :rtype: None or ~msrest.pipeline.ClientRawResponse
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Serialize and validate the path arguments, then expand the operation's
    # URL template with them.
    path_values = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
        'siteName': self._serialize.url("site_name", site_name, 'str'),
        'name': self._serialize.url("name", name, 'str'),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
    }
    url = self._client.format_url(
        self.disable_recommendation_for_site.metadata['url'], **path_values)

    # Only the api-version query parameter is needed for this operation.
    query = {
        'api-version': self._serialize.query("self.api_version", self.api_version, 'str')
    }

    # Assemble request headers: optional client request id, caller-supplied
    # headers, and the configured accept-language.
    headers = {}
    if self.config.generate_client_request_id:
        headers['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        headers.update(custom_headers)
    if self.config.accept_language is not None:
        headers['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Issue the POST and check the outcome; 200 is the only success code.
    request = self._client.post(url, query, headers)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [200]:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    if raw:
        return ClientRawResponse(None, response)
disable_recommendation_for_site.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/recommendations/{name}/disable'}
| 48.7125
| 196
| 0.662336
| 3,447
| 31,176
| 5.791703
| 0.071657
| 0.02755
| 0.025546
| 0.032458
| 0.91104
| 0.901523
| 0.881286
| 0.876177
| 0.85574
| 0.840313
| 0
| 0.00573
| 0.238677
| 31,176
| 639
| 197
| 48.788732
| 0.835391
| 0.299525
| 0
| 0.786184
| 0
| 0.013158
| 0.15789
| 0.083493
| 0
| 0
| 0
| 0
| 0
| 1
| 0.042763
| false
| 0
| 0.013158
| 0
| 0.115132
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6a16d6d61863081ab76bcf343edb92cf4313cc75
| 7,371
|
py
|
Python
|
sdk/python/pulumi_proxmoxve/permission/outputs.py
|
muhlba91/pulumi-proxmoxve
|
f17723c42b46c004be43ea0d39ff30ea176dd529
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_proxmoxve/permission/outputs.py
|
muhlba91/pulumi-proxmoxve
|
f17723c42b46c004be43ea0d39ff30ea176dd529
|
[
"ECL-2.0",
"Apache-2.0"
] | 3
|
2021-11-23T07:11:46.000Z
|
2022-02-10T09:18:13.000Z
|
sdk/python/pulumi_proxmoxve/permission/outputs.py
|
muhlba91/pulumi-proxmoxve
|
f17723c42b46c004be43ea0d39ff30ea176dd529
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
# Public API of this module: one entry per output type class defined below.
__all__ = [
    'GroupAcl',
    'PoolMember',
    'UserAcl',
    'GetGroupAclResult',
    'GetPoolMemberResult',
    'GetUserAclResult',
]
@pulumi.output_type
class GroupAcl(dict):
    """Output type holding a group ACL entry: a required `path` and
    `role_id`, plus an optional `propagate` flag.
    """

    @staticmethod
    def __key_warning(key: str):
        # Warn when the camelCase wire key is used instead of the
        # snake_case property getter.
        suggest = "role_id" if key == "roleId" else None
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in GroupAcl. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        GroupAcl.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        GroupAcl.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 path: str,
                 role_id: str,
                 propagate: Optional[bool] = None):
        pulumi.set(__self__, "path", path)
        pulumi.set(__self__, "role_id", role_id)
        # `propagate` is only stored when explicitly provided.
        if propagate is not None:
            pulumi.set(__self__, "propagate", propagate)

    @property
    @pulumi.getter
    def path(self) -> str:
        """Return the stored 'path' value."""
        return pulumi.get(self, "path")

    @property
    @pulumi.getter(name="roleId")
    def role_id(self) -> str:
        """Return the stored 'role_id' value."""
        return pulumi.get(self, "role_id")

    @property
    @pulumi.getter
    def propagate(self) -> Optional[bool]:
        """Return the stored 'propagate' value, if any."""
        return pulumi.get(self, "propagate")
@pulumi.output_type
class PoolMember(dict):
    """Output type describing one pool member; every field is optional."""

    @staticmethod
    def __key_warning(key: str):
        # Map camelCase wire keys to their snake_case property getters and
        # warn when a camelCase key is accessed directly.
        suggest = {
            "datastoreId": "datastore_id",
            "nodeName": "node_name",
            "vmId": "vm_id",
        }.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in PoolMember. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        PoolMember.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        PoolMember.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 datastore_id: Optional[str] = None,
                 id: Optional[str] = None,
                 node_name: Optional[str] = None,
                 type: Optional[str] = None,
                 vm_id: Optional[int] = None):
        # Only store the fields the caller actually supplied.
        for field, value in (
                ("datastore_id", datastore_id),
                ("id", id),
                ("node_name", node_name),
                ("type", type),
                ("vm_id", vm_id)):
            if value is not None:
                pulumi.set(__self__, field, value)

    @property
    @pulumi.getter(name="datastoreId")
    def datastore_id(self) -> Optional[str]:
        """Return the stored 'datastore_id' value, if any."""
        return pulumi.get(self, "datastore_id")

    @property
    @pulumi.getter
    def id(self) -> Optional[str]:
        """Return the stored 'id' value, if any."""
        return pulumi.get(self, "id")

    @property
    @pulumi.getter(name="nodeName")
    def node_name(self) -> Optional[str]:
        """Return the stored 'node_name' value, if any."""
        return pulumi.get(self, "node_name")

    @property
    @pulumi.getter
    def type(self) -> Optional[str]:
        """Return the stored 'type' value, if any."""
        return pulumi.get(self, "type")

    @property
    @pulumi.getter(name="vmId")
    def vm_id(self) -> Optional[int]:
        """Return the stored 'vm_id' value, if any."""
        return pulumi.get(self, "vm_id")
@pulumi.output_type
class UserAcl(dict):
    """Output type holding a user ACL entry: a required `path` and
    `role_id`, plus an optional `propagate` flag.
    """

    @staticmethod
    def __key_warning(key: str):
        # Warn when the camelCase wire key is used instead of the
        # snake_case property getter.
        suggest = "role_id" if key == "roleId" else None
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in UserAcl. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        UserAcl.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        UserAcl.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 path: str,
                 role_id: str,
                 propagate: Optional[bool] = None):
        pulumi.set(__self__, "path", path)
        pulumi.set(__self__, "role_id", role_id)
        # `propagate` is only stored when explicitly provided.
        if propagate is not None:
            pulumi.set(__self__, "propagate", propagate)

    @property
    @pulumi.getter
    def path(self) -> str:
        """Return the stored 'path' value."""
        return pulumi.get(self, "path")

    @property
    @pulumi.getter(name="roleId")
    def role_id(self) -> str:
        """Return the stored 'role_id' value."""
        return pulumi.get(self, "role_id")

    @property
    @pulumi.getter
    def propagate(self) -> Optional[bool]:
        """Return the stored 'propagate' value, if any."""
        return pulumi.get(self, "propagate")
@pulumi.output_type
class GetGroupAclResult(dict):
    """Query-result type for a group ACL entry; all three fields are
    required."""

    def __init__(__self__, *,
                 path: str,
                 propagate: bool,
                 role_id: str):
        for field, value in (("path", path),
                             ("propagate", propagate),
                             ("role_id", role_id)):
            pulumi.set(__self__, field, value)

    @property
    @pulumi.getter
    def path(self) -> str:
        """Return the stored 'path' value."""
        return pulumi.get(self, "path")

    @property
    @pulumi.getter
    def propagate(self) -> bool:
        """Return the stored 'propagate' value."""
        return pulumi.get(self, "propagate")

    @property
    @pulumi.getter(name="roleId")
    def role_id(self) -> str:
        """Return the stored 'role_id' value."""
        return pulumi.get(self, "role_id")
@pulumi.output_type
class GetPoolMemberResult(dict):
    """Query-result type for one pool member; all five fields are
    required."""

    def __init__(__self__, *,
                 datastore_id: str,
                 id: str,
                 node_name: str,
                 type: str,
                 vm_id: int):
        for field, value in (("datastore_id", datastore_id),
                             ("id", id),
                             ("node_name", node_name),
                             ("type", type),
                             ("vm_id", vm_id)):
            pulumi.set(__self__, field, value)

    @property
    @pulumi.getter(name="datastoreId")
    def datastore_id(self) -> str:
        """Return the stored 'datastore_id' value."""
        return pulumi.get(self, "datastore_id")

    @property
    @pulumi.getter
    def id(self) -> str:
        """Return the stored 'id' value."""
        return pulumi.get(self, "id")

    @property
    @pulumi.getter(name="nodeName")
    def node_name(self) -> str:
        """Return the stored 'node_name' value."""
        return pulumi.get(self, "node_name")

    @property
    @pulumi.getter
    def type(self) -> str:
        """Return the stored 'type' value."""
        return pulumi.get(self, "type")

    @property
    @pulumi.getter(name="vmId")
    def vm_id(self) -> int:
        """Return the stored 'vm_id' value."""
        return pulumi.get(self, "vm_id")
@pulumi.output_type
class GetUserAclResult(dict):
    """Query-result type for a user ACL entry; all three fields are
    required."""

    def __init__(__self__, *,
                 path: str,
                 propagate: bool,
                 role_id: str):
        for field, value in (("path", path),
                             ("propagate", propagate),
                             ("role_id", role_id)):
            pulumi.set(__self__, field, value)

    @property
    @pulumi.getter
    def path(self) -> str:
        """Return the stored 'path' value."""
        return pulumi.get(self, "path")

    @property
    @pulumi.getter
    def propagate(self) -> bool:
        """Return the stored 'propagate' value."""
        return pulumi.get(self, "propagate")

    @property
    @pulumi.getter(name="roleId")
    def role_id(self) -> str:
        """Return the stored 'role_id' value."""
        return pulumi.get(self, "role_id")
| 27.710526
| 130
| 0.586759
| 867
| 7,371
| 4.693195
| 0.106113
| 0.043008
| 0.070288
| 0.102728
| 0.828213
| 0.802654
| 0.795036
| 0.736053
| 0.732121
| 0.732121
| 0
| 0.000191
| 0.288699
| 7,371
| 265
| 131
| 27.815094
| 0.775892
| 0.024013
| 0
| 0.751174
| 1
| 0.014085
| 0.111157
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.173709
| false
| 0
| 0.023474
| 0.103286
| 0.356808
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 9
|
e06dd96e75fcb6625fd86ce3cd96170940051c46
| 6,413
|
py
|
Python
|
avalanche/benchmarks/utils/data_loader.py
|
ryanlindeborg/avalanche
|
32333776e729bad22f369f8923bc32416c9edcf9
|
[
"MIT"
] | 12
|
2021-04-16T15:49:59.000Z
|
2022-02-27T18:04:58.000Z
|
avalanche/benchmarks/utils/data_loader.py
|
ryanlindeborg/avalanche
|
32333776e729bad22f369f8923bc32416c9edcf9
|
[
"MIT"
] | null | null | null |
avalanche/benchmarks/utils/data_loader.py
|
ryanlindeborg/avalanche
|
32333776e729bad22f369f8923bc32416c9edcf9
|
[
"MIT"
] | 2
|
2021-06-22T04:11:52.000Z
|
2021-11-12T03:27:18.000Z
|
################################################################################
# Copyright (c) 2021 ContinualAI. #
# Copyrights licensed under the MIT License. #
# See the accompanying LICENSE file for terms. #
# #
# Date: 01-12-2020 #
# Author(s): Antonio Carta #
# E-mail: contact@continualai.org #
# Website: avalanche.continualai.org #
################################################################################
from torch.utils.data.dataloader import DataLoader
from typing import Dict
class MultiTaskDataLoader:
    def __init__(self, data_dict: Dict, oversample_small_tasks: bool = False,
                 **kwargs):
        """ Custom data loader for multi-task training.

        The dictionary `data_dict` maps task ids into their
        corresponding datasets.

        When iterating over the data, it returns sequentially a different
        batch for each task (i.e. first a batch for task 1, then task 2,
        and so on). If `oversample_small_tasks == True` smaller tasks are
        oversampled to match the largest task.

        It is suggested to use this loader only if tasks have approximately
        the same length.

        :param data_dict: a dictionary with task ids as keys and Datasets
            as values.
        :param oversample_small_tasks: whether smaller tasks should be
            oversampled to match the largest one.
        :param kwargs: data loader arguments used to instantiate the loader
            for each task separately. See pytorch :class:`DataLoader`.
        """
        self.data_dict = data_dict
        self.loaders_dict: Dict[int, DataLoader] = {}
        self.oversample_small_tasks = oversample_small_tasks

        for task_id, data in self.data_dict.items():
            self.loaders_dict[task_id] = DataLoader(data, **kwargs)
        # Length of the longest task's loader, in batches.
        self.max_len = max(len(d) for d in self.loaders_dict.values())

    def __iter__(self):
        """Yield `(task_id, x, y)` tuples, cycling through the tasks.

        Each successful batch is yielded exactly once.  Exhausted tasks are
        either restarted (when `oversample_small_tasks`) or dropped from the
        rotation.
        """
        iter_dataloaders = {}
        for t in self.loaders_dict.keys():
            iter_dataloaders[t] = iter(self.loaders_dict[t])
        max_len = max(len(d) for d in iter_dataloaders.values())

        try:
            for it in range(max_len):
                # list() is necessary because exhausted tasks may be removed
                # from the dictionary while we iterate over its keys.
                for t in list(iter_dataloaders.keys()):
                    t_loader = iter_dataloaders[t]
                    try:
                        x, y, *_ = next(t_loader)
                    except StopIteration:
                        # StopIteration is thrown if dataset ends.
                        if self.oversample_small_tasks:
                            # Reinitialize from the original DataLoader:
                            # iter() on an exhausted iterator returns the
                            # same exhausted iterator, so we must go back
                            # to the source loader.
                            iter_dataloaders[t] = iter(self.loaders_dict[t])
                            self.current_dataloader = iter_dataloaders[t]
                            x, y, *_ = next(iter_dataloaders[t])
                        else:
                            del iter_dataloaders[t]
                            continue
                    # Single yield point: the batch obtained either from the
                    # normal path or from the restarted loader.
                    yield t, x, y
        except StopIteration:
            # A restarted loader was empty; stop iteration gracefully.
            return

    def __len__(self):
        # Worst-case number of batches: every task padded to the longest one.
        return self.max_len * len(self.loaders_dict)
class MultiTaskMultiBatchDataLoader:
    def __init__(self, data_dict: Dict, oversample_small_tasks: bool = False,
                 **kwargs):
        """ Custom data loader for multi-task training.

        The dictionary `data_dict` maps task ids into their
        corresponding datasets.

        Mini-batches emitted by this dataloader are dictionaries with task
        labels as keys and mini-batches as values. Therefore, each mini-batch
        contains separate data for each task (i.e. key 1 batch for task 1).

        If `oversample_small_tasks == True` smaller tasks are oversampled to
        match the largest task.

        It is suggested to use this loader only if tasks have approximately
        the same length.

        :param data_dict: a dictionary with task ids as keys and Datasets
            as values.
        :param oversample_small_tasks: whether smaller tasks should be
            oversampled to match the largest one.
        :param kwargs: data loader arguments used to instantiate the loader
            for each task separately. See pytorch :class:`DataLoader`.
        """
        self.data_dict = data_dict
        self.loaders_dict: Dict[int, DataLoader] = {}
        self.oversample_small_tasks = oversample_small_tasks

        for task_id, data in self.data_dict.items():
            self.loaders_dict[task_id] = DataLoader(data, **kwargs)
        # Length of the longest task's loader, in batches.
        self.max_len = max(len(d) for d in self.loaders_dict.values())

    def __iter__(self):
        """Yield dicts mapping task id -> (x, y) batch, one per step.

        Exhausted tasks are restarted (when `oversample_small_tasks`) or
        dropped from subsequent mini-batch dicts.
        """
        iter_dataloaders = {}
        for t in self.loaders_dict.keys():
            iter_dataloaders[t] = iter(self.loaders_dict[t])
        max_len = max(len(d) for d in iter_dataloaders.values())

        try:
            for it in range(max_len):
                mb_curr = {}
                # list() is necessary because exhausted tasks may be removed
                # from the dictionary while we iterate over its keys.
                for t in list(iter_dataloaders.keys()):
                    t_loader = iter_dataloaders[t]
                    try:
                        x, y, *_ = next(t_loader)
                    except StopIteration:
                        # StopIteration is thrown if dataset ends.
                        if self.oversample_small_tasks:
                            # Reinitialize from the original DataLoader:
                            # iter() on an exhausted iterator returns the
                            # same exhausted iterator, so we must go back
                            # to the source loader.
                            iter_dataloaders[t] = iter(self.loaders_dict[t])
                            self.current_dataloader = iter_dataloaders[t]
                            x, y, *_ = next(iter_dataloaders[t])
                        else:
                            del iter_dataloaders[t]
                            continue
                    mb_curr[t] = x, y
                yield mb_curr
        except StopIteration:
            # A restarted loader was empty; stop iteration gracefully.
            return

    def __len__(self):
        # One dict-of-batches per step of the longest task.
        return self.max_len
| 45.161972
| 80
| 0.52877
| 690
| 6,413
| 4.749275
| 0.230435
| 0.034178
| 0.067135
| 0.025633
| 0.808666
| 0.800732
| 0.79585
| 0.79585
| 0.763503
| 0.763503
| 0
| 0.004049
| 0.383752
| 6,413
| 141
| 81
| 45.48227
| 0.825152
| 0.399813
| 0
| 0.873239
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.084507
| false
| 0
| 0.028169
| 0.028169
| 0.197183
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0ee5058e5c6e0fe21ec1151c15860fe8b550ac3b
| 29,678
|
py
|
Python
|
python_msx_sdk/api/template_assignments_api.py
|
CiscoDevNet/python-msx-sdk
|
d7e0a08c656504b4f4551d263e67c671a2a04b3f
|
[
"MIT"
] | null | null | null |
python_msx_sdk/api/template_assignments_api.py
|
CiscoDevNet/python-msx-sdk
|
d7e0a08c656504b4f4551d263e67c671a2a04b3f
|
[
"MIT"
] | null | null | null |
python_msx_sdk/api/template_assignments_api.py
|
CiscoDevNet/python-msx-sdk
|
d7e0a08c656504b4f4551d263e67c671a2a04b3f
|
[
"MIT"
] | null | null | null |
"""
MSX SDK
MSX SDK client. # noqa: E501
The version of the OpenAPI document: 1.0.9
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from python_msx_sdk.api_client import ApiClient, Endpoint as _Endpoint
from python_msx_sdk.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from python_msx_sdk.model.error import Error
from python_msx_sdk.model.template_assignment import TemplateAssignment
from python_msx_sdk.model.template_assignment_response import TemplateAssignmentResponse
from python_msx_sdk.model.template_assignment_status_patch import TemplateAssignmentStatusPatch
from python_msx_sdk.model.template_assignments_page import TemplateAssignmentsPage
class TemplateAssignmentsApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def __batch_assign_template(
self,
id,
request_body,
**kwargs
):
"""Assigns a template to one or more tenants. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.batch_assign_template(id, request_body, async_req=True)
>>> result = thread.get()
Args:
id (str):
request_body ([str]):
Keyword Args:
inheritable (bool): [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
[TemplateAssignmentResponse]
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['id'] = \
id
kwargs['request_body'] = \
request_body
return self.call_with_http_info(**kwargs)
self.batch_assign_template = _Endpoint(
settings={
'response_type': ([TemplateAssignmentResponse],),
'auth': [],
'endpoint_path': '/template/api/v8/templates/{id}/assignments/add',
'operation_id': 'batch_assign_template',
'http_method': 'POST',
'servers': None,
},
params_map={
'all': [
'id',
'request_body',
'inheritable',
],
'required': [
'id',
'request_body',
],
'nullable': [
],
'enum': [
],
'validation': [
'request_body',
]
},
root_map={
'validations': {
('request_body',): {
},
},
'allowed_values': {
},
'openapi_types': {
'id':
(str,),
'request_body':
([str],),
'inheritable':
(bool,),
},
'attribute_map': {
'id': 'id',
'inheritable': 'inheritable',
},
'location_map': {
'id': 'path',
'request_body': 'body',
'inheritable': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__batch_assign_template
)
def __batch_unassign_template(
self,
id,
request_body,
**kwargs
):
"""Unassigns a template from one or more tenants. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.batch_unassign_template(id, request_body, async_req=True)
>>> result = thread.get()
Args:
id (str):
request_body ([str]):
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
[TemplateAssignmentResponse]
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['id'] = \
id
kwargs['request_body'] = \
request_body
return self.call_with_http_info(**kwargs)
self.batch_unassign_template = _Endpoint(
settings={
'response_type': ([TemplateAssignmentResponse],),
'auth': [],
'endpoint_path': '/template/api/v8/templates/{id}/assignments/remove',
'operation_id': 'batch_unassign_template',
'http_method': 'POST',
'servers': None,
},
params_map={
'all': [
'id',
'request_body',
],
'required': [
'id',
'request_body',
],
'nullable': [
],
'enum': [
],
'validation': [
'request_body',
]
},
root_map={
'validations': {
('request_body',): {
},
},
'allowed_values': {
},
'openapi_types': {
'id':
(str,),
'request_body':
([str],),
},
'attribute_map': {
'id': 'id',
},
'location_map': {
'id': 'path',
'request_body': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__batch_unassign_template
)
def __get_assignment(
self,
id,
**kwargs
):
"""Gets a template assignment. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_assignment(id, async_req=True)
>>> result = thread.get()
Args:
id (str): ID of template assignment record.
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
TemplateAssignment
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['id'] = \
id
return self.call_with_http_info(**kwargs)
self.get_assignment = _Endpoint(
settings={
'response_type': (TemplateAssignment,),
'auth': [],
'endpoint_path': '/template/api/v8/templates/assignments/{id}',
'operation_id': 'get_assignment',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'id',
],
'required': [
'id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'id':
(str,),
},
'attribute_map': {
'id': 'id',
},
'location_map': {
'id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__get_assignment
)
def __get_assignment_history(
self,
id,
**kwargs
):
"""Gets a template assignment history. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_assignment_history(id, async_req=True)
>>> result = thread.get()
Args:
id (str):
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
[TemplateAssignment]
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['id'] = \
id
return self.call_with_http_info(**kwargs)
self.get_assignment_history = _Endpoint(
settings={
'response_type': ([TemplateAssignment],),
'auth': [],
'endpoint_path': '/template/api/v8/templates/assignments/{id}/history',
'operation_id': 'get_assignment_history',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'id',
],
'required': [
'id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'id':
(str,),
},
'attribute_map': {
'id': 'id',
},
'location_map': {
'id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__get_assignment_history
)
def __get_template_assignments_page(
    self,
    page,
    page_size,
    **kwargs
):
    """Returns a page of template assignments.  # noqa: E501

    Synchronous by default; pass async_req=True to get a request thread
    back instead of the response.

    >>> thread = api.get_template_assignments_page(page, page_size, async_req=True)
    >>> result = thread.get()

    Args:
        page (int): zero-based page index (validated >= 0 by the endpoint).
        page_size (int): items per page (validated 1..1000 by the endpoint).

    Keyword Args:
        template_id (str): [optional]
        tenant_id (str): [optional]
        calculate_total_items (bool, none_type): [optional]
        _return_http_data_only (bool): response data without head status
            code and headers. Default is True.
        _preload_content (bool): if False, return the raw urllib3
            HTTPResponse without reading/decoding. Default is True.
        _request_timeout (float/tuple): total timeout, or a
            (connection, read) pair. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): server index to use. Default is read
            from the configuration.
        async_req (bool): execute request asynchronously.

    Returns:
        TemplateAssignmentsPage
        If the method is called asynchronously, returns the request
        thread.
    """
    # Fill in the client-behaviour options with their defaults when the
    # caller did not supply them explicitly.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    # Positional parameters are forwarded through kwargs as well.
    kwargs['page'] = page
    kwargs['page_size'] = page_size
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for GET /template/api/v8/templates/assignments.
# Generated by the OpenAPI code generator: pairs the metadata below with
# the private __get_template_assignments_page callable.
self.get_template_assignments_page = _Endpoint(
    settings={
        'response_type': (TemplateAssignmentsPage,),  # deserialization target
        'auth': [],  # no auth scheme configured for this operation
        'endpoint_path': '/template/api/v8/templates/assignments',
        'operation_id': 'get_template_assignments_page',
        'http_method': 'GET',
        'servers': None,  # fall back to the client's configured host
    },
    params_map={
        # All accepted parameter names for this operation.
        'all': [
            'page',
            'page_size',
            'template_id',
            'tenant_id',
            'calculate_total_items',
        ],
        # Parameters the caller must supply.
        'required': [
            'page',
            'page_size',
        ],
        # Parameters that may be passed as None.
        'nullable': [
            'calculate_total_items',
        ],
        'enum': [
        ],
        # Parameters with range checks (see root_map['validations']).
        'validation': [
            'page',
            'page_size',
        ]
    },
    root_map={
        'validations': {
            ('page',): {
                'inclusive_minimum': 0,  # pages are zero-based
            },
            ('page_size',): {
                'inclusive_maximum': 1000,
                'inclusive_minimum': 1,
            },
        },
        'allowed_values': {
        },
        # Python-side types used for input checking / serialization.
        'openapi_types': {
            'page':
                (int,),
            'page_size':
                (int,),
            'template_id':
                (str,),
            'tenant_id':
                (str,),
            'calculate_total_items':
                (bool, none_type,),
        },
        # snake_case attribute -> wire-format (camelCase) name mapping.
        'attribute_map': {
            'page': 'page',
            'page_size': 'pageSize',
            'template_id': 'templateId',
            'tenant_id': 'tenantId',
            'calculate_total_items': 'calculateTotalItems',
        },
        # Where each parameter goes in the HTTP request.
        'location_map': {
            'page': 'query',
            'page_size': 'query',
            'template_id': 'query',
            'tenant_id': 'query',
            'calculate_total_items': 'query',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_template_assignments_page
)
def __update_assignment_status(
    self,
    id,
    template_assignment_status_patch,
    **kwargs
):
    """Updates a template assignment status.  # noqa: E501

    Synchronous by default; pass async_req=True to get a request thread
    back instead of the response.

    >>> thread = api.update_assignment_status(id, template_assignment_status_patch, async_req=True)
    >>> result = thread.get()

    Args:
        id (str): ID of template assignment record.
        template_assignment_status_patch (TemplateAssignmentStatusPatch):
            request body carrying the status change.

    Keyword Args:
        _return_http_data_only (bool): response data without head status
            code and headers. Default is True.
        _preload_content (bool): if False, return the raw urllib3
            HTTPResponse without reading/decoding. Default is True.
        _request_timeout (float/tuple): total timeout, or a
            (connection, read) pair. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): server index to use. Default is read
            from the configuration.
        async_req (bool): execute request asynchronously.

    Returns:
        TemplateAssignment
        If the method is called asynchronously, returns the request
        thread.
    """
    # Fill in the client-behaviour options with their defaults when the
    # caller did not supply them explicitly.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    # Positional parameters are forwarded through kwargs as well.
    kwargs['id'] = id
    kwargs['template_assignment_status_patch'] = \
        template_assignment_status_patch
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for PATCH /template/api/v8/templates/assignments/{id}.
# Generated by the OpenAPI code generator: pairs the metadata below with
# the private __update_assignment_status callable.
self.update_assignment_status = _Endpoint(
    settings={
        'response_type': (TemplateAssignment,),  # deserialization target
        'auth': [],  # no auth scheme configured for this operation
        'endpoint_path': '/template/api/v8/templates/assignments/{id}',
        'operation_id': 'update_assignment_status',
        'http_method': 'PATCH',
        'servers': None,  # fall back to the client's configured host
    },
    params_map={
        # All accepted parameter names for this operation.
        'all': [
            'id',
            'template_assignment_status_patch',
        ],
        # Parameters the caller must supply.
        'required': [
            'id',
            'template_assignment_status_patch',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        # Python-side types used for input checking / serialization.
        'openapi_types': {
            'id':
                (str,),
            'template_assignment_status_patch':
                (TemplateAssignmentStatusPatch,),
        },
        # snake_case attribute -> wire-format name mapping (path param only).
        'attribute_map': {
            'id': 'id',
        },
        # Where each parameter goes in the HTTP request.
        'location_map': {
            'id': 'path',
            'template_assignment_status_patch': 'body',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        # PATCH carries a JSON request body.
        'content_type': [
            'application/json'
        ]
    },
    api_client=api_client,
    callable=__update_assignment_status
)
| 35.973333
| 107
| 0.454983
| 2,454
| 29,678
| 5.236756
| 0.084352
| 0.029414
| 0.024278
| 0.025212
| 0.834176
| 0.809587
| 0.795736
| 0.775426
| 0.763443
| 0.75784
| 0
| 0.003192
| 0.461588
| 29,678
| 824
| 108
| 36.01699
| 0.801051
| 0.307534
| 0
| 0.627907
| 1
| 0
| 0.21788
| 0.057653
| 0
| 0
| 0
| 0
| 0
| 1
| 0.012522
| false
| 0
| 0.0161
| 0
| 0.041145
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1663369b901311891266a11647f1ecf9db3f8131
| 168
|
py
|
Python
|
pysit/solvers/variable_density_acoustic/time/scalar/__init__.py
|
zfang-slim/pysit
|
8fca42b9749841abc302d1f8195a1437fad7ae4d
|
[
"BSD-3-Clause"
] | 64
|
2015-09-08T06:23:27.000Z
|
2022-03-09T23:35:24.000Z
|
pysit/solvers/variable_density_acoustic/time/scalar/__init__.py
|
zfang-slim/pysit
|
8fca42b9749841abc302d1f8195a1437fad7ae4d
|
[
"BSD-3-Clause"
] | 23
|
2015-10-08T01:14:24.000Z
|
2021-07-15T11:37:05.000Z
|
pysit/solvers/variable_density_acoustic/time/scalar/__init__.py
|
zfang-slim/pysit
|
8fca42b9749841abc302d1f8195a1437fad7ae4d
|
[
"BSD-3-Clause"
] | 48
|
2015-06-25T14:48:22.000Z
|
2021-12-06T19:50:25.000Z
|
from .variable_density_acoustic_time_scalar_1D import *
from .variable_density_acoustic_time_scalar_2D import *
from .variable_density_acoustic_time_scalar_3D import *
| 42
| 55
| 0.892857
| 24
| 168
| 5.625
| 0.416667
| 0.266667
| 0.422222
| 0.6
| 0.911111
| 0.911111
| 0.637037
| 0
| 0
| 0
| 0
| 0.019231
| 0.071429
| 168
| 3
| 56
| 56
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 10
|
1680e1e481adedde5c6a8fb13ebdb68039b0c585
| 14,166
|
py
|
Python
|
desdeo_emo/utilities/plotlyanimate.py
|
gialmisi/desdeo-emo
|
4835084824f6478c63513cb5b22380d9b27c3a9a
|
[
"MIT"
] | 3
|
2021-05-02T17:42:39.000Z
|
2022-02-16T05:22:56.000Z
|
desdeo_emo/utilities/plotlyanimate.py
|
gialmisi/desdeo-emo
|
4835084824f6478c63513cb5b22380d9b27c3a9a
|
[
"MIT"
] | 32
|
2019-10-30T08:33:13.000Z
|
2022-03-12T00:54:02.000Z
|
desdeo_emo/utilities/plotlyanimate.py
|
gialmisi/desdeo-emo
|
4835084824f6478c63513cb5b22380d9b27c3a9a
|
[
"MIT"
] | 12
|
2019-10-16T10:00:47.000Z
|
2022-03-17T13:31:41.000Z
|
from typing import Union
import numpy as np
import pandas as pd
import plotly.graph_objs as go
from plotly.offline.offline import plot
def animate_init_(data: Union[np.ndarray, pd.DataFrame, list], filename: str) -> dict:
    """Plot the first (or zeroth) iteration of a population.

    Intended as a frames object. Plots Scatter for 2D and 3D data.
    Plots parallel coordinate plot for higher dimensional data.

    Parameters
    ----------
    data : Union[np.ndarray, pd.DataFrame, list]
        Contains the data to be plotted. Each row is an individual's
        objective values. NOTE(review): `data.shape` is used below, so a
        plain list will not actually work here — presumably callers pass
        an ndarray or DataFrame; confirm before tightening the hint.
    filename : str
        Contains the name of the file to which the plot is saved.

    Returns
    -------
    dict
        Plotly figure object

    Raises
    ------
    ValueError
        If the data has fewer than two columns; previously this case fell
        through all branches and crashed with NameError on `figure`.
    """
    numobj = data.shape[1]
    # Dispatch on the number of objectives; 4 and up get a
    # parallel-coordinates plot.
    if numobj == 2:
        figure = animate_2d_init_(data, filename)
    elif numobj == 3:
        figure = animate_3d_init_(data, filename)
    elif numobj >= 4:
        figure = animate_parallel_coords_init_(data, filename)
    else:
        raise ValueError(
            "Data must have at least two columns (objectives) to plot."
        )
    print("Plot saved as: ", filename)
    print("View the plot by opening the file in browser.")
    print("To view the plot in Jupyter Notebook, use the IFrame command.")
    return figure
def animate_next_(
    data: Union[np.ndarray, pd.DataFrame, list],
    figure: dict,
    filename: str,
    generation: int = None,
) -> dict:
    """Append the next set of individuals to an ongoing animation.

    Scatter plots are used for 2D and 3D data, parallel coordinate plots
    for 4D and up.

    Parameters
    ----------
    data : Union[np.ndarray, pd.DataFrame, list]
        The objective values to be plotted
    figure : dict
        Plotly figure object compatible dict
    filename : str
        Name of the file to which the plot is saved
    generation : int
        Iteration number; defaults to the next frame index.

    Returns
    -------
    dict
        Plotly Figure Object
    """
    if generation is None:
        # Default to the index of the frame being added.
        generation = len(figure["frames"])
    num_objectives = data.shape[1]
    if num_objectives == 2:
        return animate_2d_next_(data, figure, filename, generation)
    if num_objectives == 3:
        return animate_3d_next_(data, figure, filename, generation)
    if num_objectives >= 4:
        return animate_parallel_coords_next_(data, figure, filename, generation)
    # Fewer than two objectives: nothing to plot, hand the figure back.
    return figure
def animate_2d_init_(
    data: Union[np.ndarray, pd.DataFrame, list], filename: str
) -> dict:
    """Initiate a 2D scatter animation.

    Only for 2D data.

    Parameters
    ----------
    data : Union[np.ndarray, pd.DataFrame, list]
        Objective values; column 0 is plotted on x, column 1 on y.
    filename : str
        Name of the file to which plot is saved

    Returns
    -------
    dict
        Plotly Figure Object
    """
    figure = {"data": [], "layout": {}, "frames": []}
    figure["layout"]["xaxis"] = {"autorange": True}
    figure["layout"]["yaxis"] = {"autorange": True}
    figure["layout"]["hovermode"] = "closest"
    # NOTE: the original code assigned a dict of slider commands to
    # figure["layout"]["sliders"] here and immediately overwrote it with
    # [sliders_dict] below; that dead store has been removed.
    sliders_dict = {
        "active": 0,
        "yanchor": "top",
        "xanchor": "left",
        "currentvalue": {
            "font": {"size": 20},
            "prefix": "Iteration:",
            "visible": True,
            "xanchor": "right",
        },
        "transition": {"duration": 300, "easing": "cubic-in-out"},
        "pad": {"b": 10, "t": 50},
        "len": 0.9,
        "x": 0.1,
        "y": 0,
        "steps": [],  # populated by animate_2d_next_ per generation
    }
    figure["layout"]["sliders"] = [sliders_dict]
    data_dict = {
        "x": list(data[:, 0]),
        "y": list(data[:, 1]),
        "mode": "markers",
        "marker": {
            "size": 5,
            "color": "rgba(255, 182, 193, .9)",
            "line": dict(width=2),
        },
    }
    figure["data"].append(data_dict)
    plot(figure, filename=filename)
    # Register generation 0 as the first animation frame.
    animate_2d_next_(data, figure, filename, 0)
    return figure
def animate_2d_next_(
    data: Union[np.ndarray, pd.DataFrame, list],
    figure: dict,
    filename: str,
    generation: int,
) -> dict:
    """Append one generation to a 2D scatter animation.

    Parameters
    ----------
    data : Union[np.ndarray, pd.DataFrame, list]
        The objective values to be plotted
    figure : dict
        Plotly figure object compatible dict
    filename : str
        Name of the file to which the plot is saved
    generation : int
        Iteration Number

    Returns
    -------
    dict
        Plotly Figure Object
    """
    scatter = {
        "x": list(data[:, 0]),
        "y": list(data[:, 1]),
        "mode": "markers",
        "marker": {
            "size": 5,
            "color": "rgba(255, 182, 193, .9)",
            "line": dict(width=2),
        },
    }
    new_frame = {"data": [scatter], "name": str(generation)}
    figure["frames"].append(new_frame)
    # Register a slider step that jumps the animation to this generation.
    slider = figure["layout"]["sliders"][0]
    slider["steps"].append(
        {
            "args": [
                [generation],
                {
                    "frame": {"duration": 300, "redraw": False},
                    "mode": "immediate",
                    "transition": {"duration": 300},
                },
            ],
            "label": generation,
            "method": "animate",
        }
    )
    figure["layout"]["sliders"] = [slider]
    plot(figure, auto_open=False, filename=filename)
    return figure
def animate_3d_init_(
    data: Union[np.ndarray, pd.DataFrame, list], filename: str
) -> dict:
    """Plot the first (or zeroth) iteration of a population.

    Intended as a frames object. Plots Scatter 3D data.

    Parameters
    ----------
    data : Union[np.ndarray, pd.DataFrame, list]
        Contains the data to be plotted. Each row is an individual's
        objective values; columns 0-2 map to x, y, z.
    filename : str
        Contains the name of the file to which the plot is saved.

    Returns
    -------
    dict
        Plotly figure object
    """
    figure = {"data": [], "layout": {}, "frames": []}
    figure["layout"]["hovermode"] = "closest"
    # NOTE: the original code assigned a dict of slider commands to
    # figure["layout"]["sliders"] here and immediately overwrote it with
    # [sliders_dict] below; that dead store has been removed.
    sliders_dict = {
        "active": 0,
        "yanchor": "top",
        "xanchor": "left",
        "currentvalue": {
            "font": {"size": 20},
            "prefix": "Iteration:",
            "visible": True,
            "xanchor": "right",
        },
        "transition": {"duration": 300, "easing": "cubic-in-out"},
        "pad": {"b": 10, "t": 50},
        "len": 0.9,
        "x": 0.1,
        "y": 0,
        "steps": [],  # populated by animate_3d_next_ per generation
    }
    figure["layout"]["sliders"] = [sliders_dict]
    # Distinct pastel backgrounds per axis pane to aid depth perception.
    figure["layout"]["scene"] = dict(
        xaxis=dict(
            backgroundcolor="rgb(200, 200, 230)",
            gridcolor="rgb(255, 255, 255)",
            showbackground=True,
            zerolinecolor="rgb(255, 255, 255)",
        ),
        yaxis=dict(
            backgroundcolor="rgb(230, 200,230)",
            gridcolor="rgb(255, 255, 255)",
            showbackground=True,
            zerolinecolor="rgb(255, 255, 255)",
        ),
        zaxis=dict(
            backgroundcolor="rgb(230, 230,200)",
            gridcolor="rgb(255, 255, 255)",
            showbackground=True,
            zerolinecolor="rgb(255, 255, 255)",
        ),
    )
    data_dict = go.Scatter3d(
        x=list(data[:, 0]),
        y=list(data[:, 1]),
        z=list(data[:, 2]),
        mode="markers",
        marker=dict(
            size=8,
            color=data[:, 2],  # color-code markers by the third objective
            colorscale="Viridis",
            opacity=0.5,
            line=dict(width=2, color="black", colorscale="Viridis"),
        ),
    )
    figure["data"].append(data_dict)
    plot(figure, filename=filename)
    # Register generation 0 as the first animation frame.
    animate_3d_next_(data, figure, filename, 0)
    return figure
def animate_3d_next_(
    data: Union[np.ndarray, pd.DataFrame, list],
    figure: dict,
    filename: str,
    generation: int,
) -> dict:
    """Append one generation to a 3D scatter animation.

    Parameters
    ----------
    data : Union[np.ndarray, pd.DataFrame, list]
        The objective values to be plotted
    figure : dict
        Plotly figure object compatible dict
    filename : str
        Name of the file to which the plot is saved
    generation : int
        Iteration Number

    Returns
    -------
    dict
        Plotly Figure Object
    """
    scatter = go.Scatter3d(
        x=list(data[:, 0]),
        y=list(data[:, 1]),
        z=list(data[:, 2]),
        mode="markers",
        marker=dict(
            size=8,
            color=data[:, 2],
            colorscale="Viridis",
            opacity=0.5,
            line=dict(width=2, color="black", colorscale="Viridis"),
        ),
    )
    new_frame = {"data": [scatter], "name": str(generation)}
    figure["frames"].append(new_frame)
    # Register a slider step that jumps the animation to this generation;
    # redraw=True is required for 3D traces to update.
    slider = figure["layout"]["sliders"][0]
    slider["steps"].append(
        {
            "args": [
                [generation],
                {
                    "frame": {"duration": 300, "redraw": True},
                    "mode": "immediate",
                    "transition": {"duration": 300},
                },
            ],
            "label": generation,
            "method": "animate",
        }
    )
    figure["layout"]["sliders"] = [slider]
    plot(figure, auto_open=False, filename=filename)
    return figure
def animate_parallel_coords_init_(
    data: Union[np.ndarray, pd.DataFrame, list], filename: str
) -> dict:
    """Plot the first (or zeroth) iteration of a population.

    Intended as a frames object. Plots parallel coordinate plot for >3D data.

    Parameters
    ----------
    data : Union[np.ndarray, pd.DataFrame, list]
        Contains the data to be plotted. Each row is an individual's
        objective values.
    filename : str
        Contains the name of the file to which the plot is saved.

    Returns
    -------
    dict
        Plotly figure object
    """
    figure = {"data": [], "layout": {}, "frames": []}
    objectives = pd.DataFrame(data)
    figure["layout"]["hovermode"] = "closest"
    # NOTE: the original code assigned a dict of slider commands to
    # figure["layout"]["sliders"] here and immediately overwrote it with
    # [sliders_dict] below; that dead store has been removed.
    sliders_dict = {
        "active": 0,
        "yanchor": "top",
        "xanchor": "left",
        "currentvalue": {
            "font": {"size": 20},
            "prefix": "Iteration:",
            "visible": True,
            "xanchor": "right",
        },
        "transition": {"duration": 300, "easing": "cubic-in-out"},
        "pad": {"b": 10, "t": 50},
        "len": 0.9,
        "x": 0.1,
        "y": 0,
        "steps": [],  # populated by animate_parallel_coords_next_
    }
    figure["layout"]["sliders"] = [sliders_dict]
    # One parallel axis per objective column, labelled f0, f1, ...
    dimensions = [
        dict(
            range=[min(objectives[column]), max(objectives[column])],
            label="f" + str(column),
            values=objectives[column],
        )
        for column in objectives
    ]
    # Lines are colored by the first objective.
    line = dict(
        color=objectives[0],
        colorscale="Viridis",
        showscale=True,
        cmin=min(objectives[objectives.columns[0]]),
        cmax=max(objectives[objectives.columns[0]]),
    )
    data_dict = go.Parcoords(line=line, dimensions=dimensions)
    figure["data"].append(data_dict)
    plot(figure, filename=filename)
    # Register generation 0 as the first animation frame.
    animate_parallel_coords_next_(data, figure, filename, 0)
    return figure
def animate_parallel_coords_next_(
    data: Union[np.ndarray, pd.DataFrame, list],
    figure: dict,
    filename: str,
    generation: int,
) -> dict:
    """Append one generation to a parallel-coordinates animation (4D+).

    Parameters
    ----------
    data : Union[np.ndarray, pd.DataFrame, list]
        The objective values to be plotted
    figure : dict
        Plotly figure object compatible dict
    filename : str
        Name of the file to which the plot is saved
    generation : int
        Iteration Number

    Returns
    -------
    dict
        Plotly Figure Object
    """
    objectives = pd.DataFrame(data)
    # One parallel axis per objective column, labelled f0, f1, ...
    axis_defs = [
        dict(
            range=[min(objectives[col]), max(objectives[col])],
            label="f" + str(col),
            values=objectives[col],
        )
        for col in objectives
    ]
    first_col = objectives.columns[0]
    # Lines are colored by the first objective.
    line_style = dict(
        color=objectives[0],
        colorscale="Viridis",
        showscale=True,
        cmin=min(objectives[first_col]),
        cmax=max(objectives[first_col]),
    )
    new_frame = {
        "data": [go.Parcoords(line=line_style, dimensions=axis_defs)],
        "name": str(generation),
    }
    figure["frames"].append(new_frame)
    # Register a slider step that jumps the animation to this generation.
    slider = figure["layout"]["sliders"][0]
    slider["steps"].append(
        {
            "args": [
                [generation],
                {
                    "frame": {"duration": 300, "redraw": True},
                    "mode": "immediate",
                    "transition": {"duration": 300},
                },
            ],
            "label": generation,
            "method": "animate",
        }
    )
    figure["layout"]["sliders"] = [slider]
    plot(figure, auto_open=False, filename=filename)
    return figure
def test():
    """Smoke test: animate four generations of random 3D objective data."""
    # Start from random points, then repeatedly square to shrink values
    # toward zero from one generation to the next.
    generations = [np.random.rand(100, 3)]
    for _ in range(3):
        generations.append(np.square(generations[-1]))
    filename = "firsttest.html"
    figure = animate_3d_init_(generations[0], filename)
    for idx in range(1, 4):
        figure = animate_3d_next_(generations[idx], figure, filename, idx)
def test2():
    """Smoke test: animate ten generations of 4D parallel coordinates."""
    base = np.random.rand(100, 1) * np.pi

    def _objectives(divisor):
        # Build a 4-column objective matrix from the scaled base samples.
        scaled = base / divisor
        return np.hstack((base, np.sin(scaled), np.cos(scaled), np.log(scaled)))

    figure = animate_parallel_coords_init_(_objectives(1), "partest.html")
    for step in range(1, 10):
        figure = animate_parallel_coords_next_(
            _objectives(step), figure, "partest.html", step
        )
| 28.503018
| 86
| 0.552379
| 1,572
| 14,166
| 4.910941
| 0.134224
| 0.025648
| 0.022798
| 0.037306
| 0.88342
| 0.852202
| 0.832642
| 0.799093
| 0.795078
| 0.771503
| 0
| 0.027965
| 0.298249
| 14,166
| 496
| 87
| 28.560484
| 0.748617
| 0.217775
| 0
| 0.735562
| 0
| 0
| 0.166856
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.030395
| false
| 0
| 0.015198
| 0
| 0.069909
| 0.009119
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
168b19fb0e17706cde06a8dd135eb049bb872626
| 18,336
|
py
|
Python
|
UnitTests/test_MemberLoad_test.py
|
r0m30d4c/DlubalRFEM6
|
4bd0d744007bdc27d86d6ce535a507cdc81552ca
|
[
"MIT"
] | null | null | null |
UnitTests/test_MemberLoad_test.py
|
r0m30d4c/DlubalRFEM6
|
4bd0d744007bdc27d86d6ce535a507cdc81552ca
|
[
"MIT"
] | null | null | null |
UnitTests/test_MemberLoad_test.py
|
r0m30d4c/DlubalRFEM6
|
4bd0d744007bdc27d86d6ce535a507cdc81552ca
|
[
"MIT"
] | null | null | null |
import sys
sys.path.append(".")
from RFEM.Loads.surfaceLoad import *
from RFEM.Loads.memberLoad import *
from RFEM.Loads.nodalLoad import *
from RFEM.LoadCasesAndCombinations.loadCase import *
from RFEM.LoadCasesAndCombinations.staticAnalysisSettings import *
from RFEM.TypesForMembers.memberHinge import *
from RFEM.TypesForNodes.nodalSupport import *
from RFEM.BasicObjects.solidSet import *
from RFEM.BasicObjects.surfaceSet import *
from RFEM.BasicObjects.memberSet import *
from RFEM.BasicObjects.lineSet import *
from RFEM.BasicObjects.opening import *
from RFEM.BasicObjects.solid import *
from RFEM.BasicObjects.surface import *
from RFEM.BasicObjects.member import *
from RFEM.BasicObjects.line import *
from RFEM.BasicObjects.node import *
from RFEM.BasicObjects.thickness import *
from RFEM.BasicObjects.section import *
from RFEM.BasicObjects.material import *
from RFEM.initModel import *
from RFEM.dataTypes import *
from RFEM.enums import *
def test_member_loads():
    """Integration test: exercise every MemberLoad variant against RFEM 6.

    Builds a minimal two-member model (materials, sections, nodes, supports,
    analysis settings, one load case) and then creates one member load per
    supported load type / load distribution combination (load nos. 1-68),
    finishing with a full calculation.  Requires a live RFEM 6 instance
    reachable through ``clientModel``; there are no assertions — the test
    passes if no service call raises.
    """
    clientModel.service.begin_modification()

    # Create Material
    Material(1, 'S235')

    # Create Sections (comment previously said "Thickness"; Section objects
    # are created here: an I-profile for member 1, a pipe for member 2)
    Section(1, 'IPE 300')
    Section(2, 'CHS 100x4')

    # Create Nodes
    Node(1, 0.0, 0.0, 0.0)
    Node(2, 4.0, 0.0, 0.0)
    Node(3, 0, 5, 0)
    Node(4, 4, 5, 0)

    # Create Members (member 2 uses the pipe section for the pipe-content load)
    Member(1, MemberType.TYPE_BEAM, '1', '2', 0, 1, 1)
    Member(2, MemberType.TYPE_BEAM, '3', '4', 0, 2, 2)

    # Create Nodal Supports
    NodalSupport(1, '1', NodalSupportType.FIXED)
    NodalSupport(2, '2', NodalSupportType.FIXED)
    NodalSupport(3, '3', NodalSupportType.FIXED)
    NodalSupport(4, '4', NodalSupportType.FIXED)

    # Create Static Analysis Settings
    StaticAnalysisSettings(1, '1. Ordnung', StaticAnalysisType.GEOMETRICALLY_LINEAR)

    # Create Load Case
    LoadCase(1, 'DEAD', [True, 0.0, 0.0, 1.0])

    ## Initial Member Load ##
    MemberLoad(1, 1, '1', LoadDirectionType.LOAD_DIRECTION_LOCAL_Z, 5000)

    ## Force Type Member Load with LOAD_DISTRIBUTION_UNIFORM ##
    MemberLoad.Force(0, 2, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_UNIFORM, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, load_parameter=[5000])

    ## Force Type Member Load with LOAD_DISTRIBUTION_UNIFORM with Eccentricity ##
    MemberLoad.Force(0, 3, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_UNIFORM, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, load_parameter=[5000], force_eccentricity=True, params={'eccentricity_y_at_start' : 0.01, 'eccentricity_z_at_start': 0.02})

    ## Force Type Member Load with LOAD_DISTRIBUTION_UNIFORM_TOTAL ##
    MemberLoad.Force(0, 4, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_UNIFORM_TOTAL, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, load_parameter=[5000])

    ## Force Type Member Load with LOAD_DISTRIBUTION_CONCENTRATED_1 ##
    MemberLoad.Force(0, 5, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_1, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, load_parameter=[False, 5000, 1.2])

    ## Force Type Member Load with LOAD_DISTRIBUTION_CONCENTRATED_N ##
    MemberLoad.Force(0, 6, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_N, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, load_parameter=[False, False, 5000, 2, 1, 2])

    ## Force Type Member Load with LOAD_DISTRIBUTION_CONCENTRATED_2x2 ##
    MemberLoad.Force(0, 7, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_2x2, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, load_parameter=[False, False, False, 5000, 1, 2, 3])

    ## Force Type Member Load with LOAD_DISTRIBUTION_CONCENTRATED_2 ##
    MemberLoad.Force(0, 8, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_2, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, load_parameter=[False, False, 5000, 6000, 1, 2])

    ## Force Type Member Load with LOAD_DISTRIBUTION_CONCENTRATED_VARYING ##
    MemberLoad.Force(0, 9, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_VARYING, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, load_parameter=[[1, 1, 4000], [2, 1, 5000]])

    ## Force Type Member Load with LOAD_DISTRIBUTION_TRAPEZOIDAL ##
    MemberLoad.Force(0, 10, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_TRAPEZOIDAL, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, load_parameter=[False, False, 4000, 8000, 1, 2])

    ## Force Type Member Load with LOAD_DISTRIBUTION_TAPERED ##
    MemberLoad.Force(0, 11, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_TAPERED, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, load_parameter=[False, False, 4000, 8000, 1, 2])

    ## Force Type Member Load with LOAD_DISTRIBUTION_PARABOLIC ##
    MemberLoad.Force(0, 12, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_PARABOLIC, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, load_parameter=[4000, 8000, 12000])

    ## Force Type Member Load with LOAD_DISTRIBUTION_VARYING ##
    MemberLoad.Force(0, 13, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_VARYING, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, load_parameter=[[1, 1, 4000], [2, 1, 5000]])

    ## Force Type Member Load with LOAD_DISTRIBUTION_VARYING_IN_Z ##
    MemberLoad.Force(0, 14, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_VARYING_IN_Z, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, load_parameter=[[1, 1, 4000], [2, 1, 5000]])

    ## Moment Type Member Load with LOAD_DISTRIBUTION_UNIFORM ##
    MemberLoad.Moment(0, 15, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_UNIFORM, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, load_parameter=[5000])

    ## Moment Type Member Load with LOAD_DISTRIBUTION_CONCENTRATED_1 ##
    MemberLoad.Moment(0, 16, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_1, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, load_parameter=[False, 5000, 1.2])

    ## Moment Type Member Load with LOAD_DISTRIBUTION_CONCENTRATED_N ##
    MemberLoad.Moment(0, 17, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_N, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, load_parameter=[False, False, 5000, 2, 1, 2])

    ## Moment Type Member Load with LOAD_DISTRIBUTION_CONCENTRATED_2x2 ##
    MemberLoad.Moment(0, 18, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_2x2, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, load_parameter=[False, False, False, 5000, 1, 2, 3])

    ## Moment Type Member Load with LOAD_DISTRIBUTION_CONCENTRATED_2 ##
    MemberLoad.Moment(0, 19, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_2, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, load_parameter=[False, False, 5000, 6000, 1, 2])

    ## Moment Type Member Load with LOAD_DISTRIBUTION_CONCENTRATED_VARYING ##
    MemberLoad.Moment(0, 20, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_VARYING, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, load_parameter=[[1, 1, 4000], [2, 1, 5000]])

    ## Moment Type Member Load with LOAD_DISTRIBUTION_TRAPEZOIDAL ##
    MemberLoad.Moment(0, 21, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_TRAPEZOIDAL, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, load_parameter=[False, False, 4000, 8000, 1, 2])

    ## Moment Type Member Load with LOAD_DISTRIBUTION_TAPERED ##
    MemberLoad.Moment(0, 22, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_TAPERED, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, load_parameter=[False, False, 4000, 8000, 1, 2])

    ## Moment Type Member Load with LOAD_DISTRIBUTION_PARABOLIC ##
    MemberLoad.Moment(0, 23, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_PARABOLIC, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, load_parameter=[4000, 8000, 12000])

    ## Moment Type Member Load with LOAD_DISTRIBUTION_VARYING ##
    MemberLoad.Moment(0, 24, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_VARYING, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, load_parameter=[[1, 1, 4000], [2, 1, 5000]])

    ## Mass Type Member Load ##
    MemberLoad.Mass(0, 25, 1, mass_components=[1000])

    ## Temperature Type Member Load with LOAD_DISTRIBUTION_UNIFORM ##
    MemberLoad.Temperature(0, 26, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_UNIFORM, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, load_parameter=[18, 2])

    ## Temperature Type Member Load with LOAD_DISTRIBUTION_TRAPEZOIDAL ##
    MemberLoad.Temperature(0, 27, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_TRAPEZOIDAL, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, load_parameter=[12, 16, 18, 20, False, False, 1, 2])

    ## Temperature Type Member Load with LOAD_DISTRIBUTION_TAPERED ##
    MemberLoad.Temperature(0, 28, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_TAPERED, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, load_parameter=[12, 16, 18, 20, False, False, 1, 2])

    ## Temperature Type Member Load with LOAD_DISTRIBUTION_PARABOLIC ##
    MemberLoad.Temperature(0, 29, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_PARABOLIC, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, load_parameter=[1, 2, 3, 4, 5, 6])

    ## Temperature Type Member Load with LOAD_DISTRIBUTION_VARYING ##
    MemberLoad.Temperature(0, 30, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_VARYING, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, load_parameter=[[1, 1, 285, 289], [2, 1, 293, 297]])

    ## TemperatureChange Type Member Load with LOAD_DISTRIBUTION_UNIFORM ##
    MemberLoad.TemperatureChange(0, 31, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_UNIFORM, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, load_parameter=[18, 2])

    ## TemperatureChange Type Member Load with LOAD_DISTRIBUTION_TRAPEZOIDAL ##
    MemberLoad.TemperatureChange(0, 32, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_TRAPEZOIDAL, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, load_parameter=[12, 16, 18, 20, False, False, 1, 2])

    ## TemperatureChange Type Member Load with LOAD_DISTRIBUTION_TAPERED ##
    MemberLoad.TemperatureChange(0, 33, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_TAPERED, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, load_parameter=[12, 16, 18, 20, False, False, 1, 2])

    ## TemperatureChange Type Member Load with LOAD_DISTRIBUTION_PARABOLIC ##
    MemberLoad.TemperatureChange(0, 34, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_PARABOLIC, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, load_parameter=[1, 2, 3, 4, 5, 6])

    ## TemperatureChange Type Member Load with LOAD_DISTRIBUTION_VARYING ##
    MemberLoad.TemperatureChange(0, 35, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_VARYING, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, load_parameter=[[1, 1, 285, 289], [2, 1, 293, 297]])

    ## AxialStrain Type Member Load with LOAD_DISTRIBUTION_UNIFORM ##
    MemberLoad.AxialStrain(0, 36, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_UNIFORM, MemberLoadDirection.LOAD_DIRECTION_LOCAL_X, load_parameter=[0.005])

    ## AxialStrain Type Member Load with LOAD_DISTRIBUTION_TRAPEZOIDAL ##
    MemberLoad.AxialStrain(0, 37, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_TRAPEZOIDAL, MemberLoadDirection.LOAD_DIRECTION_LOCAL_X, load_parameter=[12, 16, False, False, 1, 2])

    ## AxialStrain Type Member Load with LOAD_DISTRIBUTION_TAPERED ##
    MemberLoad.AxialStrain(0, 38, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_TAPERED, MemberLoadDirection.LOAD_DIRECTION_LOCAL_X, load_parameter=[12, 16, False, False, 1, 2])

    ## AxialStrain Type Member Load with LOAD_DISTRIBUTION_PARABOLIC ##
    MemberLoad.AxialStrain(0, 39, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_PARABOLIC, MemberLoadDirection.LOAD_DIRECTION_LOCAL_X, load_parameter=[1, 2, 3])

    ## AxialStrain Type Member Load with LOAD_DISTRIBUTION_VARYING ##
    MemberLoad.AxialStrain(0, 40, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_VARYING, MemberLoadDirection.LOAD_DIRECTION_LOCAL_X, load_parameter=[[1, 1, 285, 289], [2, 1, 293, 297]])

    ## AxialDisplacement Type Member Load ##
    MemberLoad.AxialDisplacement(0, 41, 1, '1', MemberLoadDirection.LOAD_DIRECTION_LOCAL_X, 0.05)

    ## Precamber Type Member Load with LOAD_DISTRIBUTION_UNIFORM ##
    MemberLoad.Precamber(0, 42, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_UNIFORM, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, load_parameter=[0.005])

    ## Precamber Type Member Load with LOAD_DISTRIBUTION_TRAPEZOIDAL ##
    MemberLoad.Precamber(0, 43, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_TRAPEZOIDAL, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, load_parameter=[12, 16, False, False, 1, 2])

    ## Precamber Type Member Load with LOAD_DISTRIBUTION_TAPERED ##
    MemberLoad.Precamber(0, 44, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_TAPERED, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, load_parameter=[12, 16, False, False, 1, 2])

    ## Precamber Type Member Load with LOAD_DISTRIBUTION_PARABOLIC ##
    MemberLoad.Precamber(0, 45, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_PARABOLIC, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, load_parameter=[1, 2, 3])

    ## Precamber Type Member Load with LOAD_DISTRIBUTION_VARYING ##
    MemberLoad.Precamber(0, 46, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_VARYING, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, load_parameter=[[1, 1, 285], [2, 1, 293]])

    ## InitialPrestress Type Member Load ##
    MemberLoad.InitialPrestress(0, 47, 1, '1', MemberLoadDirection.LOAD_DIRECTION_LOCAL_X, 50)

    ## Displacement Type Member Load with LOAD_DISTRIBUTION_UNIFORM ##
    MemberLoad.Displacement(0, 48, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_UNIFORM, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, [0.5])

    ## Displacement Type Member Load with LOAD_DISTRIBUTION_CONCENTRATED_1 ##
    MemberLoad.Displacement(0, 49, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_1, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, [0.5, False, 1])

    ## Displacement Type Member Load with LOAD_DISTRIBUTION_CONCENTRATED_N ##
    MemberLoad.Displacement(0, 50, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_N, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, [0.5, False, False, 1, 2])

    ## Displacement Type Member Load with LOAD_DISTRIBUTION_CONCENTRATED_2x2 ##
    MemberLoad.Displacement(0, 51, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_2x2, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, [0.5, False, False, False, 1, 2, 3])

    ## Displacement Type Member Load with LOAD_DISTRIBUTION_CONCENTRATED_2 ##
    MemberLoad.Displacement(0, 52, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_2, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, [0.5, 0.6, False, False, 1, 2])

    ## Displacement Type Member Load with LOAD_DISTRIBUTION_CONCENTRATED_VARYING ##
    MemberLoad.Displacement(0, 53, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_VARYING, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, [[0.001, 1, 1], [0.002, 2, 1]])

    ## Displacement Type Member Load with LOAD_DISTRIBUTION_TRAPEZOIDAL ##
    MemberLoad.Displacement(0, 54, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_TRAPEZOIDAL, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, load_parameter=[12, 16, False, False, 1, 2])

    ## Displacement Type Member Load with LOAD_DISTRIBUTION_TAPERED ##
    MemberLoad.Displacement(0, 55, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_TAPERED, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, load_parameter=[12, 16, False, False, 1, 2])

    ## Displacement Type Member Load with LOAD_DISTRIBUTION_PARABOLIC ##
    MemberLoad.Displacement(0, 56, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_PARABOLIC, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, load_parameter=[1, 2, 3])

    ## Displacement Type Member Load with LOAD_DISTRIBUTION_VARYING ##
    MemberLoad.Displacement(0, 57, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_VARYING, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, load_parameter=[[1, 1, 285], [2, 1, 293]])

    ## Rotation Type Member Load with LOAD_DISTRIBUTION_UNIFORM ##
    MemberLoad.Rotation(0, 58, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_UNIFORM, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, [0.5])

    ## Rotation Type Member Load with LOAD_DISTRIBUTION_CONCENTRATED_1 ##
    MemberLoad.Rotation(0, 59, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_1, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, [0.5, False, 1])

    ## Rotation Type Member Load with LOAD_DISTRIBUTION_CONCENTRATED_N ##
    MemberLoad.Rotation(0, 60, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_N, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, [0.5, False, False, 1, 2])

    ## Rotation Type Member Load with LOAD_DISTRIBUTION_CONCENTRATED_2x2 ##
    MemberLoad.Rotation(0, 61, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_2x2, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, [0.5, False, False, False, 1, 2, 3])

    ## Rotation Type Member Load with LOAD_DISTRIBUTION_CONCENTRATED_2 ##
    MemberLoad.Rotation(0, 62, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_2, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, [0.5, 0.6, False, False, 1, 2])

    ## Rotation Type Member Load with LOAD_DISTRIBUTION_CONCENTRATED_VARYING ##
    MemberLoad.Rotation(0, 63, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_VARYING, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, [[1, 1, 285], [2, 1, 293]])

    ## Rotation Type Member Load with LOAD_DISTRIBUTION_TRAPEZOIDAL ##
    MemberLoad.Rotation(0, 64, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_TRAPEZOIDAL, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, load_parameter=[12, 16, False, False, 1, 2])

    ## Rotation Type Member Load with LOAD_DISTRIBUTION_TAPERED ##
    MemberLoad.Rotation(0, 65, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_TAPERED, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, load_parameter=[12, 16, False, False, 1, 2])

    ## Rotation Type Member Load with LOAD_DISTRIBUTION_PARABOLIC ##
    MemberLoad.Rotation(0, 66, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_PARABOLIC, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, load_parameter=[1, 2, 3])

    ## Rotation Type Member Load with LOAD_DISTRIBUTION_VARYING ##
    MemberLoad.Rotation(0, 67, 1, '1', MemberLoadDistribution.LOAD_DISTRIBUTION_VARYING, MemberLoadDirection.LOAD_DIRECTION_LOCAL_Z, load_parameter=[[1, 1, 285], [2, 1, 293]])

    ## PipeContentFull Type Member Load (member 2 has the pipe section) ##
    MemberLoad.PipeContentFull(0, 68, 1, '2', MemberLoadDirectionOrientation.LOAD_DIRECTION_FORWARD, 50)

    Calculate_all()
    print('Ready!')
    clientModel.service.finish_modification()
| 65.72043
| 252
| 0.775196
| 2,265
| 18,336
| 6.026049
| 0.087417
| 0.147703
| 0.068723
| 0.176203
| 0.80973
| 0.80907
| 0.800791
| 0.784087
| 0.678804
| 0.62979
| 0
| 0.05682
| 0.122709
| 18,336
| 278
| 253
| 65.956835
| 0.791682
| 0.230748
| 0
| 0
| 0
| 0
| 0.01172
| 0.003328
| 0
| 0
| 0
| 0
| 0
| 1
| 0.00885
| true
| 0
| 0.212389
| 0
| 0.221239
| 0.00885
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
16ca7e93193da41c98ac9662cc9318be6b22657a
| 7,166
|
py
|
Python
|
src/services/cell_printer.py
|
Thealoe/ParametrizedMazeGen
|
33bd42508ca1b866143c8a3d56bcd7047171dfa3
|
[
"MIT"
] | null | null | null |
src/services/cell_printer.py
|
Thealoe/ParametrizedMazeGen
|
33bd42508ca1b866143c8a3d56bcd7047171dfa3
|
[
"MIT"
] | null | null | null |
src/services/cell_printer.py
|
Thealoe/ParametrizedMazeGen
|
33bd42508ca1b866143c8a3d56bcd7047171dfa3
|
[
"MIT"
] | null | null | null |
class CellPrinter():
    """Render maze cells as 3x3 character tiles.

    Each cell type (0-15) encodes which passages of the cell are open.
    Two output formats are supported:

    * emoji   — brown square = wall, green square = passage
    * ASCII   — "X" = wall, " " = passage ("primitive" format)

    Fixes over the previous revision: the duplicated, unreachable second
    ``cell_type == 0`` branch is removed, the 30+ copy-pasted if-chains are
    replaced by a single lookup table, and the ASCII rows are derived from
    the emoji rows so the two formats can never drift apart (the hand-written
    ASCII rows had inconsistent widths).
    """

    # Emoji rows (top, middle, bottom) for every supported cell type.
    _CELL_ROWS = {
        0: ("🟫🟫🟫", "🟫🟫🟫", "🟫🟫🟫"),
        1: ("🟫🟩🟫", "🟫🟩🟫", "🟫🟫🟫"),
        2: ("🟫🟫🟫", "🟫🟩🟩", "🟫🟫🟫"),
        3: ("🟫🟫🟫", "🟫🟩🟫", "🟫🟩🟫"),
        4: ("🟫🟫🟫", "🟩🟩🟫", "🟫🟫🟫"),
        5: ("🟫🟩🟫", "🟩🟩🟫", "🟫🟫🟫"),
        6: ("🟫🟩🟫", "🟫🟩🟩", "🟫🟫🟫"),
        7: ("🟫🟫🟫", "🟫🟩🟩", "🟫🟩🟫"),
        8: ("🟫🟫🟫", "🟩🟩🟫", "🟫🟩🟫"),
        9: ("🟫🟫🟫", "🟩🟩🟩", "🟫🟫🟫"),
        10: ("🟫🟩🟫", "🟫🟩🟫", "🟫🟩🟫"),
        11: ("🟫🟩🟫", "🟩🟩🟩", "🟫🟫🟫"),
        12: ("🟫🟫🟫", "🟩🟩🟩", "🟫🟩🟫"),
        13: ("🟫🟩🟫", "🟩🟩🟫", "🟫🟩🟫"),
        14: ("🟫🟩🟫", "🟫🟩🟩", "🟫🟩🟫"),
        15: ("🟫🟩🟫", "🟩🟩🟩", "🟫🟩🟫"),
    }

    # Character mapping used to derive the primitive (ASCII) format.
    _TO_PRIMITIVE = str.maketrans({"🟫": "X", "🟩": " "})

    def __init__(self):
        pass

    def get_print_string(self, cell_type, row):
        """Return row ``row`` (0-2) of ``cell_type`` in emoji format.

        Returns None for an unknown cell type or row index — this mirrors
        the implicit fall-through of the original if-chains.
        """
        rows = self._CELL_ROWS.get(cell_type)
        if rows is None or not 0 <= row <= 2:
            return None
        return rows[row]

    def get_start(self, line_str):
        """Mark the maze entrance: blue square in the middle of the row."""
        return self.__change_middle_char(line_str, "🟦")

    def get_end(self, line_str):
        """Mark the maze exit: red square in the middle of the row."""
        return self.__change_middle_char(line_str, "🟥")

    def get_start_primitive_format(self, line_str):
        """Mark the maze entrance in primitive format with an 'S'."""
        return self.__change_middle_char(line_str, "S")

    def get_end_primitive_format(self, line_str):
        """Mark the maze exit in primitive format with an 'E'."""
        return self.__change_middle_char(line_str, "E")

    def __change_middle_char(self, line_str, char):
        # Replace the middle character of a 3-character row string.
        chars = list(line_str)
        chars[1] = char
        return "".join(chars)

    def get_print_string_primitive_format(self, cell_type, row):
        """Return row ``row`` of ``cell_type`` in ASCII ("X"/" ") format.

        Derived from the emoji table, so every row is exactly 3 characters;
        unknown cell types / rows return None like get_print_string().
        """
        line = self.get_print_string(cell_type, row)
        if line is None:
            return None
        return line.translate(self._TO_PRIMITIVE)
| 24.37415
| 64
| 0.309936
| 763
| 7,166
| 3.003932
| 0.069463
| 0.222513
| 0.148342
| 0.17801
| 0.918412
| 0.918412
| 0.918412
| 0.918412
| 0.890052
| 0.873473
| 0
| 0.048693
| 0.572984
| 7,166
| 293
| 65
| 24.457338
| 0.649673
| 0
| 0
| 0.933333
| 0
| 0
| 0.04326
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.031373
| false
| 0.003922
| 0
| 0.015686
| 0.454902
| 0.007843
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
16e5c4e44af6767e286067fe7cf4395f1ef4ceb4
| 3,831
|
py
|
Python
|
userbot/modules/lyrics.py
|
oxyda-fox/XBot-Remix
|
3d97bea5395b223fc89a8cc6cb699cc624ccc967
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null |
userbot/modules/lyrics.py
|
oxyda-fox/XBot-Remix
|
3d97bea5395b223fc89a8cc6cb699cc624ccc967
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null |
userbot/modules/lyrics.py
|
oxyda-fox/XBot-Remix
|
3d97bea5395b223fc89a8cc6cb699cc624ccc967
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null |
# Encrypted (marshal-serialized) module by XVenom
# https://github.com/xvenom15
# SECURITY WARNING: the exec(marshal.loads(...)) call below executes opaque,
# unauditable bytecode. Treat this module as untrusted code — do not run it
# outside a sandboxed environment, and prefer a plain-source replacement.
import marshal
exec(marshal.loads(b'\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00sx\x00\x00\x00d\x00d\x01l\x00Z\x00d\x00d\x01l\x01Z\x01d\x00d\x02l\x02m\x03Z\x03\x01\x00d\x00d\x03l\x04m\x05Z\x05m\x06Z\x06m\x07Z\x07m\x08Z\x08\x01\x00d\x00d\x04l\tm\nZ\n\x01\x00e\x06d\x01k\trRe\x01\xa0\x0be\x06\xa1\x01Z\x0ce\x03d\x05d\x06d\x07\x8d\x02d\x08d\t\x84\x00\x83\x01Z\re\x05\xa0\x0ed\td\ni\x01\xa1\x01\x01\x00d\x01S\x00)\x0b\xe9\x00\x00\x00\x00N)\x01\xda\x08register)\x04\xda\x08CMD_HELP\xda\x06GENIUS\xda\x06lastfm\xda\x0fLASTFM_USERNAME)\x01\xda\x04UserTz\x1e^.lyrics (?:(now)|(.*) - (.*)))\x02Z\x08outgoingZ\x07patternc\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06\x00\x00\x00\t\x00\x00\x00\xc3\x00\x00\x00s\x90\x01\x00\x00|\x00\xa0\x00d\x01\xa1\x01I\x00d\x00H\x00\x01\x00t\x01d\x00k\x08r,|\x00\xa0\x00d\x02\xa1\x01I\x00d\x00H\x00\x01\x00d\x03S\x00|\x00j\x02\xa0\x03d\x04\xa1\x01d\x05k\x02rxt\x04t\x05t\x06\x83\x02\xa0\x07\xa1\x00}\x01|\x01d\x00k\x08rf|\x00\xa0\x00d\x06\xa1\x01I\x00d\x00H\x00\x01\x00d\x03S\x00|\x01\xa0\x08\xa1\x00}\x02|\x01\xa0\t\xa1\x00}\x03n\x18|\x00j\x02\xa0\x03d\x07\xa1\x01}\x02|\x00j\x02\xa0\x03d\x08\xa1\x01}\x03|\x00\xa0\x00d\t|\x02\x9b\x00d\n|\x03\x9b\x00d\x0b\x9d\x05\xa1\x01I\x00d\x00H\x00\x01\x00t\n\xa0\x0b|\x03|\x02\xa1\x02}\x04|\x04d\x00k\x08r\xe4|\x00\xa0\x00d\x0c|\x02\x9b\x00d\n|\x03\x9b\x00d\r\x9d\x05\xa1\x01I\x00d\x00H\x00\x01\x00d\x03S\x00t\x0c|\x04j\r\x83\x01d\x0ek\x04\x90\x01rb|\x00\xa0\x00d\x0f\xa1\x01I\x00d\x00H\x00\x01\x00t\x0ed\x10d\x11\x83\x02\x8f$}\x05|\x05\xa0\x0fd\x12|\x02\x9b\x00d\n|\x03\x9b\x00d\x13|\x04j\r\x9b\x00\x9d\x06\xa1\x01\x01\x00W\x005\x00Q\x00R\x00X\x00|\x00j\x10j\x11|\x00j\x12d\x10|\x00j\x13d\x14\x8d\x03I\x00d\x00H\x00\x01\x00t\x14\xa0\x15d\x10\xa1\x01\x01\x00d\x15S\x00|\x00\xa0\x00d\x16|\x02\x9b\x00d\x17|\x03\x9b\x00d\x18|\x04j\r\x9b\x00d\x19\x9d\x07\xa1\x01I\x00d\x00H\x00\x01\x00d\x15S\x00d\x00S\x00)\x1aNz\x18`Getting information...`z5`Provide genius access token to Heroku 
ConfigVars...`F\xe9\x01\x00\x00\x00Z\x03nowz-`No information current lastfm scrobbling...`\xe9\x02\x00\x00\x00\xe9\x03\x00\x00\x00z\x16`Searching lyrics for z\x03 - z\x04...`z\n`Song` **z\x12** `not found...`i\x00\x10\x00\x00z-`Lyrics is too big, view the file to see it.`z\nlyrics.txtz\x02w+z\x0fSearch query: \nz\x02\n\n)\x01Z\x08reply_toTz\x13**Search query**:\n`z\x05` - `z\x06`\n\n```z\x03```)\x16Z\x04editr\x04\x00\x00\x00Z\rpattern_match\xda\x05groupr\x07\x00\x00\x00r\x06\x00\x00\x00r\x05\x00\x00\x00Z\x0fget_now_playingZ\nget_artistZ\tget_title\xda\x06geniusZ\x0bsearch_song\xda\x03len\xda\x06lyrics\xda\x04open\xda\x05writeZ\x06clientZ\tsend_fileZ\x07chat_id\xda\x02id\xda\x02os\xda\x06remove)\x06Z\x05lyricZ\x07playingZ\x06artistZ\x04songZ\x05songs\xda\x01f\xa9\x00r\x15\x00\x00\x00\xda\x00r\x0e\x00\x00\x00\x12\x00\x00\x00sJ\x00\x00\x00\x00\x02\x10\x01\x08\x01\x04\x01\x02\xff\n\x02\x04\x01\x10\x01\x0e\x01\x08\x01\x04\x01\x02\xff\n\x03\x04\x01\x08\x01\n\x02\x0c\x01\x0c\x01\x1e\x01\x0c\x01\x08\x01\x1e\x01\x04\x01\x10\x01\x10\x01\x0c\x01(\x01\x06\x01\x04\x01\x02\x01\x04\xfd\x0c\x05\n\x01\x04\x02\x04\x01\x18\xff\n\x04r\x0e\x00\x00\x00z\xa5`.lyrics` **<artist name> - <song name>**\nUsage: Get lyrics matched artist and song.\n\n`.lyrics now`\nUsage: Get lyrics artist and song from current lastfm scrobbling.)\x0fr\x12\x00\x00\x00Z\x0clyricsgeniusZ\x0euserbot.eventsr\x02\x00\x00\x00Z\x07userbotr\x03\x00\x00\x00r\x04\x00\x00\x00r\x05\x00\x00\x00r\x06\x00\x00\x00Z\x06pylastr\x07\x00\x00\x00Z\x06Geniusr\x0c\x00\x00\x00r\x0e\x00\x00\x00\xda\x06updater\x15\x00\x00\x00r\x15\x00\x00\x00r\x15\x00\x00\x00r\x16\x00\x00\x00\xda\x08<module>\x07\x00\x00\x00s\x18\x00\x00\x00\x08\x01\x08\x02\x0c\x01\x18\x01\x0c\x02\x08\x01\n\x03\n\x01\n+\x04\x01\x02\x01\x02\xfe'))
| 957.75
| 3,760
| 0.758288
| 793
| 3,831
| 3.649433
| 0.287516
| 0.155494
| 0.118176
| 0.099516
| 0.193849
| 0.171389
| 0.149274
| 0.100207
| 0.050104
| 0.027989
| 0
| 0.309429
| 0.017228
| 3,831
| 4
| 3,760
| 957.75
| 0.45923
| 0.013573
| 0
| 0
| 0
| 0.5
| 0.988883
| 0.897565
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 10
|
bc66f038bffcca87827d10b44873fa89c5aa6436
| 35,201
|
py
|
Python
|
saleor/graphql/attribute/tests/mutations/test_attribute_create.py
|
fairhopeweb/saleor
|
9ac6c22652d46ba65a5b894da5f1ba5bec48c019
|
[
"CC-BY-4.0"
] | 15,337
|
2015-01-12T02:11:52.000Z
|
2021-10-05T19:19:29.000Z
|
saleor/graphql/attribute/tests/mutations/test_attribute_create.py
|
fairhopeweb/saleor
|
9ac6c22652d46ba65a5b894da5f1ba5bec48c019
|
[
"CC-BY-4.0"
] | 7,486
|
2015-02-11T10:52:13.000Z
|
2021-10-06T09:37:15.000Z
|
saleor/graphql/attribute/tests/mutations/test_attribute_create.py
|
fairhopeweb/saleor
|
9ac6c22652d46ba65a5b894da5f1ba5bec48c019
|
[
"CC-BY-4.0"
] | 5,864
|
2015-01-16T14:52:54.000Z
|
2021-10-05T23:01:15.000Z
|
import pytest
from django.utils.text import slugify
from .....attribute.error_codes import AttributeErrorCode
from ....core.enums import MeasurementUnitsEnum
from ....tests.utils import get_graphql_content
from ...enums import AttributeEntityTypeEnum, AttributeInputTypeEnum, AttributeTypeEnum
CREATE_ATTRIBUTE_MUTATION = """
mutation createAttribute(
$input: AttributeCreateInput!
){
attributeCreate(input: $input) {
errors {
field
message
code
}
attribute {
name
slug
type
unit
inputType
entityType
filterableInStorefront
filterableInDashboard
availableInGrid
storefrontSearchPosition
choices(first: 10) {
edges {
node {
name
slug
value
file {
url
contentType
}
}
}
}
productTypes(first: 10) {
edges {
node {
id
}
}
}
}
}
}
"""
def test_create_attribute_and_attribute_values(
    staff_api_client,
    permission_manage_product_types_and_attributes,
    permission_manage_products,
):
    """Creating a PRODUCT_TYPE attribute with one value succeeds and the
    value is slugified; the new attribute is not assigned to any product type.
    """
    # given
    query = CREATE_ATTRIBUTE_MUTATION
    attribute_name = "Example name"
    name = "Value name"
    variables = {
        "input": {
            "name": attribute_name,
            "values": [{"name": name}],
            "type": AttributeTypeEnum.PRODUCT_TYPE.name,
        }
    }

    # when
    response = staff_api_client.post_graphql(
        query,
        variables,
        permissions=[
            permission_manage_product_types_and_attributes,
            permission_manage_products,
        ],
    )

    # then
    content = get_graphql_content(response)
    assert not content["data"]["attributeCreate"]["errors"]
    data = content["data"]["attributeCreate"]

    # Check if the attribute was correctly created
    assert data["attribute"]["name"] == attribute_name
    assert data["attribute"]["slug"] == slugify(
        attribute_name
    ), "The default slug should be the slugified name"
    assert (
        data["attribute"]["productTypes"]["edges"] == []
    ), "The attribute should not have been assigned to a product type"

    # Check if the attribute values were correctly created
    # BUG FIX: len() on the choices connection *dict* counts its keys and is
    # always 1; count the edge list so the assertion actually checks values.
    assert len(data["attribute"]["choices"]["edges"]) == 1
    assert data["attribute"]["type"] == AttributeTypeEnum.PRODUCT_TYPE.name
    assert data["attribute"]["choices"]["edges"][0]["node"]["name"] == name
    assert data["attribute"]["choices"]["edges"][0]["node"]["slug"] == slugify(name)
def test_create_numeric_attribute_and_attribute_values(
    staff_api_client,
    permission_manage_product_types_and_attributes,
    permission_manage_products,
):
    """A NUMERIC attribute is created with unit/filterability flags applied;
    the numeric value list is accepted but produces no choice entries.
    """
    # given
    attribute_name = "Example numeric attribute name"
    value_name = "12.1"
    variables = {
        "input": {
            "name": attribute_name,
            "values": [{"name": value_name}],
            "type": AttributeTypeEnum.PRODUCT_TYPE.name,
            "unit": MeasurementUnitsEnum.M.name,
            "inputType": AttributeInputTypeEnum.NUMERIC.name,
            "filterableInStorefront": True,
            "filterableInDashboard": True,
            "availableInGrid": True,
        }
    }

    # when
    response = staff_api_client.post_graphql(
        CREATE_ATTRIBUTE_MUTATION,
        variables,
        permissions=[
            permission_manage_product_types_and_attributes,
            permission_manage_products,
        ],
    )

    # then
    payload = get_graphql_content(response)["data"]["attributeCreate"]
    assert not payload["errors"]
    attribute = payload["attribute"]

    # The attribute itself was created with the slugified default slug.
    assert attribute["name"] == attribute_name
    assert attribute["slug"] == slugify(
        attribute_name
    ), "The default slug should be the slugified name"
    assert (
        attribute["productTypes"]["edges"] == []
    ), "The attribute should not have been assigned to a product type"

    # Numeric configuration round-tripped; no choice values were created.
    assert attribute["type"] == AttributeTypeEnum.PRODUCT_TYPE.name
    assert attribute["unit"] == MeasurementUnitsEnum.M.name
    assert attribute["inputType"] == AttributeInputTypeEnum.NUMERIC.name
    assert attribute["filterableInStorefront"] is True
    assert attribute["filterableInDashboard"] is True
    assert attribute["availableInGrid"] is True
    assert attribute["storefrontSearchPosition"] == 0
    assert attribute["choices"]["edges"] == []
def test_create_numeric_attribute_and_attribute_values_not_numeric_value_provided(
    staff_api_client,
    permission_manage_product_types_and_attributes,
    permission_manage_products,
):
    """Creating a NUMERIC attribute with a non-numeric value name fails with
    a single INVALID error on the "values" field and no attribute is created.
    """
    # given
    attribute_name = "Example numeric attribute name"
    value_name = "Width"
    variables = {
        "input": {
            "name": attribute_name,
            "values": [{"name": value_name}],
            "type": AttributeTypeEnum.PRODUCT_TYPE.name,
            "unit": MeasurementUnitsEnum.M.name,
            "inputType": AttributeInputTypeEnum.NUMERIC.name,
            "filterableInStorefront": True,
            "filterableInDashboard": True,
            "availableInGrid": True,
        }
    }

    # when
    response = staff_api_client.post_graphql(
        CREATE_ATTRIBUTE_MUTATION,
        variables,
        permissions=[
            permission_manage_product_types_and_attributes,
            permission_manage_products,
        ],
    )

    # then
    payload = get_graphql_content(response)["data"]["attributeCreate"]
    errors = payload["errors"]
    assert not payload["attribute"]
    assert len(errors) == 1
    assert errors[0]["field"] == "values"
    assert errors[0]["code"] == AttributeErrorCode.INVALID.name
def test_create_swatch_attribute_and_attribute_values_only_name_provided(
    staff_api_client,
    permission_manage_product_types_and_attributes,
    permission_manage_products,
):
    """Creating a SWATCH attribute with a name-only value succeeds.

    The attribute must carry swatch defaults (no unit) and exactly one choice
    whose slug is derived from the value name.
    """
    # given
    query = CREATE_ATTRIBUTE_MUTATION
    attribute_name = "Example numeric attribute name"
    name = "Pink"
    variables = {
        "input": {
            "name": attribute_name,
            "values": [{"name": name}],
            "type": AttributeTypeEnum.PRODUCT_TYPE.name,
            "inputType": AttributeInputTypeEnum.SWATCH.name,
            "filterableInStorefront": True,
            "filterableInDashboard": True,
            "availableInGrid": True,
        }
    }
    # when
    response = staff_api_client.post_graphql(
        query,
        variables,
        permissions=[
            permission_manage_product_types_and_attributes,
            permission_manage_products,
        ],
    )
    # then
    content = get_graphql_content(response)
    assert not content["data"]["attributeCreate"]["errors"]
    data = content["data"]["attributeCreate"]
    # Check if the attribute was correctly created
    assert data["attribute"]["name"] == attribute_name
    assert data["attribute"]["slug"] == slugify(
        attribute_name
    ), "The default slug should be the slugified name"
    assert (
        data["attribute"]["productTypes"]["edges"] == []
    ), "The attribute should not have been assigned to a product type"
    # Check if the attribute values were correctly created.
    # Fix: count the "edges" list of the choices connection — the previous
    # `len(data["attribute"]["choices"])` counted the keys of the connection
    # dict and passed regardless of how many values were created.
    assert len(data["attribute"]["choices"]["edges"]) == 1
    assert data["attribute"]["type"] == AttributeTypeEnum.PRODUCT_TYPE.name
    assert data["attribute"]["unit"] is None
    assert data["attribute"]["inputType"] == AttributeInputTypeEnum.SWATCH.name
    assert data["attribute"]["filterableInStorefront"] is True
    assert data["attribute"]["filterableInDashboard"] is True
    assert data["attribute"]["availableInGrid"] is True
    assert data["attribute"]["storefrontSearchPosition"] == 0
    assert data["attribute"]["choices"]["edges"][0]["node"]["name"] == name
    assert data["attribute"]["choices"]["edges"][0]["node"]["slug"] == slugify(name)
def test_create_swatch_attribute_and_attribute_values_with_file(
    staff_api_client,
    permission_manage_product_types_and_attributes,
    permission_manage_products,
):
    """A SWATCH attribute value may carry a file URL and content type."""
    # given
    attribute_name = "Example numeric attribute name"
    name = "Logo"
    file_url = "http://mirumee.com/test_media/test_logo.png"
    content_type = "image/png"
    variables = {
        "input": {
            "name": attribute_name,
            "values": [
                {"name": name, "fileUrl": file_url, "contentType": content_type}
            ],
            "type": AttributeTypeEnum.PRODUCT_TYPE.name,
            "inputType": AttributeInputTypeEnum.SWATCH.name,
            "filterableInStorefront": True,
            "filterableInDashboard": True,
            "availableInGrid": True,
        }
    }

    # when
    response = staff_api_client.post_graphql(
        CREATE_ATTRIBUTE_MUTATION,
        variables,
        permissions=[
            permission_manage_product_types_and_attributes,
            permission_manage_products,
        ],
    )

    # then
    payload = get_graphql_content(response)["data"]["attributeCreate"]
    assert not payload["errors"]
    attribute = payload["attribute"]
    # the attribute itself
    assert attribute["name"] == attribute_name
    assert attribute["slug"] == slugify(
        attribute_name
    ), "The default slug should be the slugified name"
    assert (
        attribute["productTypes"]["edges"] == []
    ), "The attribute should not have been assigned to a product type"
    assert attribute["type"] == AttributeTypeEnum.PRODUCT_TYPE.name
    assert attribute["unit"] is None
    assert attribute["inputType"] == AttributeInputTypeEnum.SWATCH.name
    assert attribute["filterableInStorefront"] is True
    assert attribute["filterableInDashboard"] is True
    assert attribute["availableInGrid"] is True
    assert attribute["storefrontSearchPosition"] == 0
    # the single created value, including its file payload
    assert len(attribute["choices"]["edges"]) == 1
    node = attribute["choices"]["edges"][0]["node"]
    assert node["name"] == name
    assert node["slug"] == slugify(name)
    assert node["file"] == {"url": file_url, "contentType": content_type}
def test_create_swatch_attribute_and_attribute_values_with_value(
    staff_api_client,
    permission_manage_product_types_and_attributes,
    permission_manage_products,
):
    """A SWATCH attribute value may carry a color value instead of a file."""
    # given
    attribute_name = "Example numeric attribute name"
    name = "Pink"
    value = "#ffc0cb"
    variables = {
        "input": {
            "name": attribute_name,
            "values": [{"name": name, "value": value}],
            "type": AttributeTypeEnum.PRODUCT_TYPE.name,
            "inputType": AttributeInputTypeEnum.SWATCH.name,
            "filterableInStorefront": True,
            "filterableInDashboard": True,
            "availableInGrid": True,
        }
    }

    # when
    response = staff_api_client.post_graphql(
        CREATE_ATTRIBUTE_MUTATION,
        variables,
        permissions=[
            permission_manage_product_types_and_attributes,
            permission_manage_products,
        ],
    )

    # then
    payload = get_graphql_content(response)["data"]["attributeCreate"]
    assert not payload["errors"]
    attribute = payload["attribute"]
    # the attribute itself
    assert attribute["name"] == attribute_name
    assert attribute["slug"] == slugify(
        attribute_name
    ), "The default slug should be the slugified name"
    assert (
        attribute["productTypes"]["edges"] == []
    ), "The attribute should not have been assigned to a product type"
    assert attribute["type"] == AttributeTypeEnum.PRODUCT_TYPE.name
    assert attribute["unit"] is None
    assert attribute["inputType"] == AttributeInputTypeEnum.SWATCH.name
    assert attribute["filterableInStorefront"] is True
    assert attribute["filterableInDashboard"] is True
    assert attribute["availableInGrid"] is True
    assert attribute["storefrontSearchPosition"] == 0
    # the single created value: color value set, no file attached
    assert len(attribute["choices"]["edges"]) == 1
    node = attribute["choices"]["edges"][0]["node"]
    assert node["name"] == name
    assert node["slug"] == slugify(name)
    assert node["file"] is None
    assert node["value"] == value
def test_create_swatch_attribute_and_attribute_values_file_and_value_provided(
    staff_api_client,
    permission_manage_product_types_and_attributes,
    permission_manage_products,
):
    """Supplying both a file URL and a color value for one swatch value fails."""
    # given
    file_url = "http://mirumee.com/test_media/test_file.jpeg"
    variables = {
        "input": {
            "name": "Example numeric attribute name",
            "values": [{"name": "Pink", "value": "#A8A8A8", "fileUrl": file_url}],
            "type": AttributeTypeEnum.PRODUCT_TYPE.name,
            "inputType": AttributeInputTypeEnum.SWATCH.name,
            "filterableInStorefront": True,
            "filterableInDashboard": True,
            "availableInGrid": True,
        }
    }

    # when
    response = staff_api_client.post_graphql(
        CREATE_ATTRIBUTE_MUTATION,
        variables,
        permissions=[
            permission_manage_product_types_and_attributes,
            permission_manage_products,
        ],
    )

    # then
    payload = get_graphql_content(response)["data"]["attributeCreate"]
    assert not payload["attribute"]
    errors = payload["errors"]
    assert len(errors) == 1
    assert errors[0]["field"] == "values"
    assert errors[0]["code"] == AttributeErrorCode.INVALID.name
@pytest.mark.parametrize(
    "field, value", [("fileUrl", "test.jpg"), ("value", "blue"), ("contentType", "jpg")]
)
def test_create_not_swatch_attribute_provide_not_valid_data(
    field,
    value,
    staff_api_client,
    permission_manage_product_types_and_attributes,
    permission_manage_products,
):
    """Swatch-only value fields are rejected for a non-swatch (DROPDOWN) attribute."""
    # given
    variables = {
        "input": {
            "name": "Example numeric attribute name",
            "values": [{"name": "Test", field: value}],
            "type": AttributeTypeEnum.PRODUCT_TYPE.name,
            "inputType": AttributeInputTypeEnum.DROPDOWN.name,
            "filterableInStorefront": True,
            "filterableInDashboard": True,
            "availableInGrid": True,
        }
    }

    # when
    response = staff_api_client.post_graphql(
        CREATE_ATTRIBUTE_MUTATION,
        variables,
        permissions=[
            permission_manage_product_types_and_attributes,
            permission_manage_products,
        ],
    )

    # then
    payload = get_graphql_content(response)["data"]["attributeCreate"]
    assert not payload["attribute"]
    errors = payload["errors"]
    assert len(errors) == 1
    assert errors[0]["field"] == "values"
    assert errors[0]["code"] == AttributeErrorCode.INVALID.name
def test_create_attribute_with_file_input_type(
    staff_api_client,
    permission_manage_product_types_and_attributes,
    permission_manage_products,
):
    """A FILE attribute can be created without any predefined values."""
    # given
    attribute_name = "Example name"
    variables = {
        "input": {
            "name": attribute_name,
            "type": AttributeTypeEnum.PRODUCT_TYPE.name,
            "inputType": AttributeInputTypeEnum.FILE.name,
        }
    }

    # when
    response = staff_api_client.post_graphql(
        CREATE_ATTRIBUTE_MUTATION,
        variables,
        permissions=[
            permission_manage_product_types_and_attributes,
            permission_manage_products,
        ],
    )

    # then
    payload = get_graphql_content(response)["data"]["attributeCreate"]
    assert not payload["errors"]
    attribute = payload["attribute"]
    assert attribute["name"] == attribute_name
    assert attribute["slug"] == slugify(
        attribute_name
    ), "The default slug should be the slugified name"
    assert (
        attribute["productTypes"]["edges"] == []
    ), "The attribute should not have been assigned to a product type"
    # no values are created for FILE attributes
    assert len(attribute["choices"]["edges"]) == 0
    assert attribute["type"] == AttributeTypeEnum.PRODUCT_TYPE.name
    assert attribute["inputType"] == AttributeInputTypeEnum.FILE.name
@pytest.mark.parametrize(
    "entity_type",
    [AttributeEntityTypeEnum.PAGE.name, AttributeEntityTypeEnum.PRODUCT.name],
)
def test_create_attribute_with_reference_input_type(
    entity_type,
    staff_api_client,
    permission_manage_product_types_and_attributes,
    permission_manage_products,
):
    """A REFERENCE attribute is created with the requested entity type."""
    # given
    attribute_name = "Example name"
    variables = {
        "input": {
            "name": attribute_name,
            "type": AttributeTypeEnum.PRODUCT_TYPE.name,
            "inputType": AttributeInputTypeEnum.REFERENCE.name,
            "entityType": entity_type,
        }
    }

    # when
    response = staff_api_client.post_graphql(
        CREATE_ATTRIBUTE_MUTATION,
        variables,
        permissions=[
            permission_manage_product_types_and_attributes,
            permission_manage_products,
        ],
    )

    # then
    payload = get_graphql_content(response)["data"]["attributeCreate"]
    assert not payload["errors"]
    attribute = payload["attribute"]
    assert attribute["name"] == attribute_name
    assert attribute["slug"] == slugify(
        attribute_name
    ), "The default slug should be the slugified name"
    assert (
        attribute["productTypes"]["edges"] == []
    ), "The attribute should not have been assigned to a product type"
    # no values are created for REFERENCE attributes
    assert len(attribute["choices"]["edges"]) == 0
    assert attribute["type"] == AttributeTypeEnum.PRODUCT_TYPE.name
    assert attribute["inputType"] == AttributeInputTypeEnum.REFERENCE.name
    assert attribute["entityType"] == entity_type
def test_create_attribute_with_reference_input_type_entity_type_not_given(
    staff_api_client,
    permission_manage_product_types_and_attributes,
    permission_manage_products,
):
    """A REFERENCE attribute without an entity type fails with REQUIRED."""
    # given
    variables = {
        "input": {
            "name": "Example name",
            "type": AttributeTypeEnum.PRODUCT_TYPE.name,
            "inputType": AttributeInputTypeEnum.REFERENCE.name,
        }
    }

    # when
    response = staff_api_client.post_graphql(
        CREATE_ATTRIBUTE_MUTATION,
        variables,
        permissions=[
            permission_manage_product_types_and_attributes,
            permission_manage_products,
        ],
    )

    # then
    payload = get_graphql_content(response)["data"]["attributeCreate"]
    assert not payload["attribute"]
    errors = payload["errors"]
    assert len(errors) == 1
    assert errors[0]["field"] == "entityType"
    assert errors[0]["code"] == AttributeErrorCode.REQUIRED.name
def test_create_page_attribute_and_attribute_values(
    staff_api_client,
    permission_manage_page_types_and_attributes,
    permission_manage_pages,
):
    """A PAGE_TYPE attribute is created with storefront flags defaulting off."""
    # given
    attribute_name = "Example name"
    name = "Value name"
    variables = {
        "input": {
            "name": attribute_name,
            "values": [{"name": name}],
            "type": AttributeTypeEnum.PAGE_TYPE.name,
        }
    }

    # when
    response = staff_api_client.post_graphql(
        CREATE_ATTRIBUTE_MUTATION,
        variables,
        permissions=[
            permission_manage_page_types_and_attributes,
            permission_manage_pages,
        ],
    )

    # then
    payload = get_graphql_content(response)["data"]["attributeCreate"]
    assert not payload["errors"]
    attribute = payload["attribute"]
    assert attribute["name"] == attribute_name
    assert attribute["slug"] == slugify(
        attribute_name
    ), "The default slug should be the slugified name"
    assert (
        attribute["productTypes"]["edges"] == []
    ), "The attribute should not have been assigned to a product type"
    # page attributes default to non-filterable, hidden-from-grid settings
    assert attribute["filterableInStorefront"] is False
    assert attribute["filterableInDashboard"] is False
    assert attribute["availableInGrid"] is False
    assert attribute["storefrontSearchPosition"] == 0
    # the single created value
    assert len(attribute["choices"]["edges"]) == 1
    assert attribute["type"] == AttributeTypeEnum.PAGE_TYPE.name
    node = attribute["choices"]["edges"][0]["node"]
    assert node["name"] == name
    assert node["slug"] == slugify(name)
def test_create_attribute_with_file_input_type_and_values(
    staff_api_client,
    permission_manage_product_types_and_attributes,
    permission_manage_products,
):
    """Predefined values are not allowed for a FILE attribute."""
    # given
    variables = {
        "input": {
            "name": "Example name",
            "type": AttributeTypeEnum.PRODUCT_TYPE.name,
            "values": [{"name": "Value name"}],
            "inputType": AttributeInputTypeEnum.FILE.name,
        }
    }

    # when
    response = staff_api_client.post_graphql(
        CREATE_ATTRIBUTE_MUTATION,
        variables,
        permissions=[
            permission_manage_product_types_and_attributes,
            permission_manage_products,
        ],
    )

    # then
    payload = get_graphql_content(response)["data"]["attributeCreate"]
    assert not payload["attribute"]
    errors = payload["errors"]
    assert len(errors) == 1
    assert errors[0]["field"] == "values"
    assert errors[0]["code"] == AttributeErrorCode.INVALID.name
def test_create_attribute_with_file_input_type_correct_attribute_settings(
    staff_api_client,
    permission_manage_product_types_and_attributes,
    permission_manage_products,
):
    """A FILE attribute succeeds when all storefront settings are disabled."""
    # given
    attribute_name = "Example name"
    variables = {
        "input": {
            "name": attribute_name,
            "type": AttributeTypeEnum.PRODUCT_TYPE.name,
            "inputType": AttributeInputTypeEnum.FILE.name,
            "filterableInStorefront": False,
            "filterableInDashboard": False,
            "availableInGrid": False,
            "storefrontSearchPosition": 0,
        }
    }

    # when
    response = staff_api_client.post_graphql(
        CREATE_ATTRIBUTE_MUTATION,
        variables,
        permissions=[
            permission_manage_product_types_and_attributes,
            permission_manage_products,
        ],
    )

    # then
    payload = get_graphql_content(response)["data"]["attributeCreate"]
    assert not payload["errors"]
    attribute = payload["attribute"]
    assert attribute["name"] == attribute_name
    assert attribute["slug"] == slugify(
        attribute_name
    ), "The default slug should be the slugified name"
    assert (
        attribute["productTypes"]["edges"] == []
    ), "The attribute should not have been assigned to a product type"
    # no values are created for FILE attributes
    assert len(attribute["choices"]["edges"]) == 0
    assert attribute["type"] == AttributeTypeEnum.PRODUCT_TYPE.name
    assert attribute["inputType"] == AttributeInputTypeEnum.FILE.name
def test_create_attribute_with_file_input_type_and_invalid_settings(
    staff_api_client,
    permission_manage_product_types_and_attributes,
    permission_manage_products,
):
    """Every enabled storefront setting is reported for a FILE attribute."""
    # given
    variables = {
        "input": {
            "name": "Example name",
            "type": AttributeTypeEnum.PRODUCT_TYPE.name,
            "inputType": AttributeInputTypeEnum.FILE.name,
            "filterableInStorefront": True,
            "filterableInDashboard": True,
            "availableInGrid": True,
            "storefrontSearchPosition": 1,
        }
    }

    # when
    response = staff_api_client.post_graphql(
        CREATE_ATTRIBUTE_MUTATION,
        variables,
        permissions=[
            permission_manage_product_types_and_attributes,
            permission_manage_products,
        ],
    )

    # then
    payload = get_graphql_content(response)["data"]["attributeCreate"]
    assert not payload["attribute"]
    errors = payload["errors"]
    assert len(errors) == 4
    # one INVALID error per disallowed setting
    assert {e["field"] for e in errors} == {
        "filterableInStorefront",
        "filterableInDashboard",
        "availableInGrid",
        "storefrontSearchPosition",
    }
    assert {e["code"] for e in errors} == {AttributeErrorCode.INVALID.name}
@pytest.mark.parametrize(
    "entity_type",
    [AttributeEntityTypeEnum.PAGE.name, AttributeEntityTypeEnum.PRODUCT.name],
)
def test_create_attribute_with_reference_input_type_invalid_settings(
    entity_type,
    staff_api_client,
    permission_manage_product_types_and_attributes,
    permission_manage_products,
):
    """Every enabled storefront setting is reported for a REFERENCE attribute."""
    # given
    variables = {
        "input": {
            "name": "Example name",
            "type": AttributeTypeEnum.PRODUCT_TYPE.name,
            "inputType": AttributeInputTypeEnum.REFERENCE.name,
            "entityType": entity_type,
            "filterableInStorefront": True,
            "filterableInDashboard": True,
            "availableInGrid": True,
            "storefrontSearchPosition": 1,
        }
    }

    # when
    response = staff_api_client.post_graphql(
        CREATE_ATTRIBUTE_MUTATION,
        variables,
        permissions=[
            permission_manage_product_types_and_attributes,
            permission_manage_products,
        ],
    )

    # then
    payload = get_graphql_content(response)["data"]["attributeCreate"]
    assert not payload["attribute"]
    errors = payload["errors"]
    assert len(errors) == 4
    # one INVALID error per disallowed setting
    assert {e["field"] for e in errors} == {
        "filterableInStorefront",
        "filterableInDashboard",
        "availableInGrid",
        "storefrontSearchPosition",
    }
    assert {e["code"] for e in errors} == {AttributeErrorCode.INVALID.name}
@pytest.mark.parametrize(
    "field, value",
    [
        ("filterableInStorefront", True),
        ("filterableInDashboard", True),
        ("availableInGrid", True),
        ("storefrontSearchPosition", 4),
    ],
)
def test_create_attribute_with_file_input_type_and_invalid_one_settings_value(
    field,
    value,
    staff_api_client,
    permission_manage_product_types_and_attributes,
    permission_manage_products,
):
    """A single disallowed setting on a FILE attribute yields one INVALID error."""
    # given
    variables = {
        "input": {
            "name": "Example name",
            "type": AttributeTypeEnum.PRODUCT_TYPE.name,
            "inputType": AttributeInputTypeEnum.FILE.name,
            field: value,
        }
    }

    # when
    response = staff_api_client.post_graphql(
        CREATE_ATTRIBUTE_MUTATION,
        variables,
        permissions=[
            permission_manage_product_types_and_attributes,
            permission_manage_products,
        ],
    )

    # then
    payload = get_graphql_content(response)["data"]["attributeCreate"]
    assert not payload["attribute"]
    errors = payload["errors"]
    assert len(errors) == 1
    assert errors[0]["field"] == field
    assert errors[0]["code"] == AttributeErrorCode.INVALID.name
@pytest.mark.parametrize(
    "field, value",
    [
        ("filterableInStorefront", True),
        ("filterableInDashboard", True),
        ("availableInGrid", True),
        ("storefrontSearchPosition", 4),
    ],
)
def test_create_attribute_with_reference_input_type_invalid_one_settings_value(
    field,
    value,
    staff_api_client,
    permission_manage_product_types_and_attributes,
    permission_manage_products,
):
    """A single disallowed setting on a REFERENCE attribute yields one INVALID error."""
    # given
    variables = {
        "input": {
            "name": "Example name",
            "type": AttributeTypeEnum.PRODUCT_TYPE.name,
            "inputType": AttributeInputTypeEnum.REFERENCE.name,
            "entityType": AttributeEntityTypeEnum.PAGE.name,
            field: value,
        }
    }

    # when
    response = staff_api_client.post_graphql(
        CREATE_ATTRIBUTE_MUTATION,
        variables,
        permissions=[
            permission_manage_product_types_and_attributes,
            permission_manage_products,
        ],
    )

    # then
    payload = get_graphql_content(response)["data"]["attributeCreate"]
    assert not payload["attribute"]
    errors = payload["errors"]
    assert len(errors) == 1
    assert errors[0]["field"] == field
    assert errors[0]["code"] == AttributeErrorCode.INVALID.name
@pytest.mark.parametrize(
    "entity_type",
    [AttributeEntityTypeEnum.PAGE.name, AttributeEntityTypeEnum.PRODUCT.name],
)
def test_create_attribute_with_reference_input_type_values_given(
    entity_type,
    staff_api_client,
    permission_manage_product_types_and_attributes,
    permission_manage_products,
):
    """Predefined values are not allowed for a REFERENCE attribute."""
    # given
    variables = {
        "input": {
            "name": "Example name",
            "type": AttributeTypeEnum.PRODUCT_TYPE.name,
            "inputType": AttributeInputTypeEnum.REFERENCE.name,
            "entityType": entity_type,
            "values": [{"name": "test-value"}],
        }
    }

    # when
    response = staff_api_client.post_graphql(
        CREATE_ATTRIBUTE_MUTATION,
        variables,
        permissions=[
            permission_manage_product_types_and_attributes,
            permission_manage_products,
        ],
    )

    # then
    payload = get_graphql_content(response)["data"]["attributeCreate"]
    assert not payload["attribute"]
    errors = payload["errors"]
    assert len(errors) == 1
    assert errors[0]["field"] == "values"
    assert errors[0]["code"] == AttributeErrorCode.INVALID.name
@pytest.mark.parametrize(
    "input_slug, expected_slug",
    (
        ("my-slug", "my-slug"),
        (None, "my-name"),
        ("", "my-name"),
        ("わたし-わ-にっぽん-です", "わたし-わ-にっぽん-です"),
    ),
)
def test_create_attribute_with_given_slug(
    staff_api_client,
    permission_manage_product_types_and_attributes,
    permission_manage_products,
    input_slug,
    expected_slug,
):
    """An explicit slug is kept; a missing/empty slug falls back to the name."""
    # given
    staff_api_client.user.user_permissions.add(
        permission_manage_product_types_and_attributes
    )
    query = """
    mutation createAttribute(
        $name: String!, $slug: String, $type: AttributeTypeEnum!) {
        attributeCreate(input: {name: $name, slug: $slug, type: $type}) {
            errors {
                field
                message
                code
            }
            attribute {
                slug
            }
        }
    }
    """
    variables = {
        "name": "My Name",
        "slug": input_slug,
        "type": AttributeTypeEnum.PRODUCT_TYPE.name,
    }

    # when
    response = staff_api_client.post_graphql(query, variables)

    # then
    payload = get_graphql_content(response)["data"]["attributeCreate"]
    assert not payload["errors"]
    assert payload["attribute"]["slug"] == expected_slug
def test_create_attribute_value_name_and_slug_with_unicode(
    staff_api_client,
    permission_manage_product_types_and_attributes,
    permission_manage_products,
):
    """Unicode attribute names and slugs are stored verbatim."""
    # given
    name = "わたし わ にっぽん です"
    slug = "わたし-わ-にっぽん-で"
    variables = {
        "input": {
            "name": name,
            "slug": slug,
            "type": AttributeTypeEnum.PRODUCT_TYPE.name,
        }
    }

    # when
    response = staff_api_client.post_graphql(
        CREATE_ATTRIBUTE_MUTATION,
        variables,
        permissions=[
            permission_manage_product_types_and_attributes,
            permission_manage_products,
        ],
    )

    # then
    payload = get_graphql_content(response)["data"]["attributeCreate"]
    assert not payload["errors"]
    attribute = payload["attribute"]
    assert attribute["name"] == name
    assert attribute["slug"] == slug
@pytest.mark.parametrize(
    "name_1, name_2, error_msg, error_code",
    (
        (
            "Red color",
            "Red color",
            "Provided values are not unique.",
            AttributeErrorCode.UNIQUE,
        ),
        (
            "Red color",
            "red color",
            "Provided values are not unique.",
            AttributeErrorCode.UNIQUE,
        ),
    ),
)
def test_create_attribute_and_attribute_values_errors(
    staff_api_client,
    name_1,
    name_2,
    error_msg,
    error_code,
    permission_manage_product_types_and_attributes,
    permission_manage_products,
    product_type,
):
    """Duplicate value names (case-insensitive) produce a UNIQUE error."""
    # given
    variables = {
        "input": {
            "name": "Example name",
            "type": AttributeTypeEnum.PRODUCT_TYPE.name,
            "values": [{"name": name_1}, {"name": name_2}],
        }
    }

    # when
    response = staff_api_client.post_graphql(
        CREATE_ATTRIBUTE_MUTATION,
        variables,
        permissions=[
            permission_manage_product_types_and_attributes,
            permission_manage_products,
        ],
    )

    # then
    errors = get_graphql_content(response)["data"]["attributeCreate"]["errors"]
    assert errors
    assert errors[0]["field"] == "values"
    assert errors[0]["message"] == error_msg
    assert errors[0]["code"] == error_code.name
| 30.503466
| 88
| 0.629812
| 3,341
| 35,201
| 6.392098
| 0.046693
| 0.06209
| 0.074733
| 0.056378
| 0.922645
| 0.900496
| 0.888884
| 0.880642
| 0.860695
| 0.836486
| 0
| 0.00295
| 0.258459
| 35,201
| 1,153
| 89
| 30.529922
| 0.815194
| 0.035027
| 0
| 0.767417
| 0
| 0.001072
| 0.235841
| 0.036508
| 0
| 0
| 0
| 0
| 0.158628
| 1
| 0.02358
| false
| 0
| 0.006431
| 0
| 0.030011
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bcb48076815bacc2b38ca1ae9db58d4659e7f7b4
| 9,023
|
py
|
Python
|
integration_tests/test_feegrant.py
|
macong-cdc/chain-main
|
3158a195d4ce787af2f5abea5cbfa6ba686ca89d
|
[
"Apache-2.0"
] | 311
|
2021-02-23T16:38:21.000Z
|
2022-03-31T17:08:36.000Z
|
integration_tests/test_feegrant.py
|
macong-cdc/chain-main
|
3158a195d4ce787af2f5abea5cbfa6ba686ca89d
|
[
"Apache-2.0"
] | 344
|
2021-02-23T01:30:58.000Z
|
2022-03-30T20:06:06.000Z
|
integration_tests/test_feegrant.py
|
macong-cdc/chain-main
|
3158a195d4ce787af2f5abea5cbfa6ba686ca89d
|
[
"Apache-2.0"
] | 196
|
2021-02-25T17:28:03.000Z
|
2022-03-29T20:49:48.000Z
|
import datetime
import pytest
from dateutil.parser import isoparse
from .utils import (
BASECRO_DENOM,
SUCCESS_CODE,
grant_fee_allowance,
query_block_info,
revoke_fee_grant,
transfer,
wait_for_block,
wait_for_block_time,
)
pytestmark = pytest.mark.normal
def test_basic_fee_allowance(cluster):
    """
    check basic fee allowance with no limit or grant expiry
    """
    transaction_coins = 100
    fee_coins = 10
    granter = cluster.address("community")
    grantee = cluster.address("ecosystem")
    receiver = cluster.address("reserve")
    granter_before = cluster.balance(granter)
    grantee_before = cluster.balance(grantee)
    receiver_before = cluster.balance(receiver)
    grant_fee_allowance(cluster, granter, grantee)
    transfer(
        cluster,
        grantee,
        receiver,
        f"{transaction_coins}{BASECRO_DENOM}",
        fees=f"{fee_coins}{BASECRO_DENOM}",
        fee_account=granter,
    )
    # the granter pays the fee; the grantee only pays the transferred amount
    assert cluster.balance(granter) == granter_before - fee_coins
    assert cluster.balance(grantee) == grantee_before - transaction_coins
    assert cluster.balance(receiver) == receiver_before + transaction_coins
def test_tx_failed_when_exceeds_grant_fee(cluster):
    """
    check transaction should fail when tx fee exceeds fee limit in basic fee allowance
    """
    transaction_coins = 100
    fee_coins = 10
    fee_grant_spend_limit = 5
    granter = cluster.address("community")
    grantee = cluster.address("ecosystem")
    receiver = cluster.address("reserve")
    granter_before = cluster.balance(granter)
    grantee_before = cluster.balance(grantee)
    receiver_before = cluster.balance(receiver)
    # replace any grant left over from a previous test with a capped one
    revoke_fee_grant(cluster, granter, grantee)
    grant_fee_allowance(
        cluster,
        granter,
        grantee,
        spend_limit=f"{fee_grant_spend_limit}{BASECRO_DENOM}",
    )
    tx = transfer(
        cluster,
        grantee,
        receiver,
        f"{transaction_coins}{BASECRO_DENOM}",
        fees=f"{fee_coins}{BASECRO_DENOM}",
        fee_account=granter,
    )
    assert tx["code"] != SUCCESS_CODE, "should fail as fee limit exceeded"
    # nothing moved: all three balances are unchanged
    assert cluster.balance(granter) == granter_before
    assert cluster.balance(grantee) == grantee_before
    assert cluster.balance(receiver) == receiver_before
def test_tx_failed_after_grant_expiration(cluster):
    """
    check transaction should fail when tx happens after grant expiry
    """
    transaction_coins = 100
    fee_coins = 10
    # RFC 3339 timestamp set to "now", so the grant is already expired by the
    # time the transfer runs. `datetime.utcnow()` is deprecated since Python
    # 3.12; derive the identical naive-UTC ISO string from an aware "now".
    grant_expiration = (
        datetime.datetime.now(datetime.timezone.utc)
        .replace(tzinfo=None)
        .isoformat()
        + "Z"
    )
    fee_granter_address = cluster.address("community")
    fee_grantee_address = cluster.address("ecosystem")
    receiver_address = cluster.address("reserve")
    fee_granter_balance = cluster.balance(fee_granter_address)
    fee_grantee_balance = cluster.balance(fee_grantee_address)
    receiver_balance = cluster.balance(receiver_address)
    revoke_fee_grant(cluster, fee_granter_address, fee_grantee_address)
    grant_fee_allowance(
        cluster, fee_granter_address, fee_grantee_address, expiration=grant_expiration
    )
    tx = transfer(
        cluster,
        fee_grantee_address,
        receiver_address,
        "%s%s" % (transaction_coins, BASECRO_DENOM),
        fees="%s%s" % (fee_coins, BASECRO_DENOM),
        fee_account=fee_granter_address,
    )
    assert tx["code"] != SUCCESS_CODE, "should fail as fee allowance expired"
    # nothing moved: all three balances are unchanged
    assert cluster.balance(fee_granter_address) == fee_granter_balance
    assert cluster.balance(fee_grantee_address) == fee_grantee_balance
    assert cluster.balance(receiver_address) == receiver_balance
def test_periodic_fee_allowance(cluster):
    """
    check periodic fee allowance with no expiration
    """
    transaction_coins = 100
    fee_coins = 10
    period = 5
    period_limit = 11
    number_of_periods = 3
    granter = cluster.address("community")
    grantee = cluster.address("ecosystem")
    receiver = cluster.address("reserve")
    granter_before = cluster.balance(granter)
    grantee_before = cluster.balance(grantee)
    receiver_before = cluster.balance(receiver)
    revoke_fee_grant(cluster, granter, grantee)
    grant_fee_allowance(
        cluster,
        granter,
        grantee,
        period_limit=f"{period_limit}{BASECRO_DENOM}",
        period=period,
    )
    # perform one fee-granted transfer per period, waiting for the period to
    # roll over before the next one
    for _ in range(number_of_periods):
        tx = transfer(
            cluster,
            grantee,
            receiver,
            f"{transaction_coins}{BASECRO_DENOM}",
            fees=f"{fee_coins}{BASECRO_DENOM}",
            fee_account=granter,
        )
        wait_for_block(cluster, int(tx["height"]))
        block_info = query_block_info(cluster, tx["height"])
        wait_for_block_time(
            cluster,
            isoparse(block_info["block"]["header"]["time"])
            + datetime.timedelta(seconds=period),
        )
    assert (
        cluster.balance(granter)
        == granter_before - fee_coins * number_of_periods
    )
    assert (
        cluster.balance(grantee)
        == grantee_before - transaction_coins * number_of_periods
    )
    assert (
        cluster.balance(receiver)
        == receiver_before + transaction_coins * number_of_periods
    )
def test_exceed_period_limit_should_not_affect_the_next_period(cluster):
    """
    check exceeding periodic fee should not affect next period
    """
    transaction_coins = 100
    fee_coins = 10
    period = 5
    period_limit = 11
    granter = cluster.address("community")
    grantee = cluster.address("ecosystem")
    receiver = cluster.address("reserve")
    granter_before = cluster.balance(granter)
    grantee_before = cluster.balance(grantee)
    receiver_before = cluster.balance(receiver)
    revoke_fee_grant(cluster, granter, grantee)
    grant_fee_allowance(
        cluster,
        granter,
        grantee,
        period_limit=f"{period_limit}{BASECRO_DENOM}",
        period=period,
    )
    # first transfer consumes almost the whole period limit
    tx = transfer(
        cluster,
        grantee,
        receiver,
        f"{transaction_coins}{BASECRO_DENOM}",
        fees=f"{fee_coins}{BASECRO_DENOM}",
        fee_account=granter,
    )
    # second transfer in the same period must be rejected
    failed_tx = transfer(
        cluster,
        grantee,
        receiver,
        f"{transaction_coins}{BASECRO_DENOM}",
        fees=f"{fee_coins}{BASECRO_DENOM}",
        fee_account=granter,
    )
    assert failed_tx["code"] != SUCCESS_CODE, "should fail as fee exceeds period limit"
    # wait until the next period starts, then transfer again
    wait_for_block(cluster, int(tx["height"]))
    block_info = query_block_info(cluster, tx["height"])
    wait_for_block_time(
        cluster,
        isoparse(block_info["block"]["header"]["time"])
        + datetime.timedelta(seconds=period),
    )
    transfer(
        cluster,
        grantee,
        receiver,
        f"{transaction_coins}{BASECRO_DENOM}",
        fees=f"{fee_coins}{BASECRO_DENOM}",
        fee_account=granter,
    )
    # transaction only happened two times
    assert cluster.balance(granter) == granter_before - fee_coins * 2
    assert cluster.balance(grantee) == grantee_before - transaction_coins * 2
    assert cluster.balance(receiver) == receiver_before + transaction_coins * 2
def test_revoke_fee_grant(cluster):
    """
    check tx should fail after fee grant is revoked

    Grants a fee allowance, immediately revokes it, and verifies a
    transfer that tries to use the revoked allowance is rejected.
    """
    amount = 100
    fee = 10
    granter = cluster.address("community")
    grantee = cluster.address("ecosystem")
    receiver = cluster.address("reserve")
    # start from a clean slate, grant, then revoke again
    revoke_fee_grant(cluster, granter, grantee)
    grant_fee_allowance(cluster, granter, grantee)
    revoke_fee_grant(cluster, granter, grantee)
    rsp = transfer(
        cluster,
        grantee,
        receiver,
        f"{amount}{BASECRO_DENOM}",
        fees=f"{fee}{BASECRO_DENOM}",
        fee_account=granter,
    )
    assert rsp["code"] != SUCCESS_CODE, "should fail as grant is revoked"
| 31.439024
| 88
| 0.69744
| 1,054
| 9,023
| 5.591082
| 0.093928
| 0.078059
| 0.103852
| 0.074665
| 0.846598
| 0.846258
| 0.843713
| 0.819956
| 0.810792
| 0.777193
| 0
| 0.006355
| 0.215228
| 9,023
| 286
| 89
| 31.548951
| 0.825872
| 0.045883
| 0
| 0.72093
| 0
| 0
| 0.051192
| 0
| 0
| 0
| 0
| 0
| 0.088372
| 1
| 0.027907
| false
| 0
| 0.018605
| 0
| 0.046512
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4c12180cc5e4007eef35149d812a1c662453b628
| 90,316
|
py
|
Python
|
temperature_scaling.py
|
liorfrenkel1992/focal_calibration
|
cdfba52eb35b5bc3acc21532861f75940a933b1c
|
[
"MIT"
] | null | null | null |
temperature_scaling.py
|
liorfrenkel1992/focal_calibration
|
cdfba52eb35b5bc3acc21532861f75940a933b1c
|
[
"MIT"
] | null | null | null |
temperature_scaling.py
|
liorfrenkel1992/focal_calibration
|
cdfba52eb35b5bc3acc21532861f75940a933b1c
|
[
"MIT"
] | null | null | null |
'''
Code to perform temperature scaling. Adapted from https://github.com/gpleiss/temperature_scaling
'''
from numpy.core.numeric import cross
import torch
import numpy as np
from torch import nn, optim
from torch.nn import functional as F
import math
from Metrics.metrics import test_classification_net_logits
from Metrics.metrics import ECELoss, ClassECELoss, posnegECELoss, estECELoss
from Metrics.metrics2 import ECE, softmax, test_classification_net_logits2
torch.set_printoptions(precision=10)
class ModelWithTemperature(nn.Module):
    """
    A thin decorator, which wraps a model with temperature scaling
    model (nn.Module):
        A classification neural network
    NB: Output of the neural network should be the classification logits,
        NOT the softmax (or log softmax)!
    """
    def __init__(self, model, log=True, const_temp=False, bins_temp=False, n_bins=15, iters=1, dists=False, grid=True):
        """
        Args:
            model: wrapped classification network returning logits.
            log: if True, print calibration progress and metrics.
            const_temp: use a single constant temperature for all samples.
            bins_temp: use per-confidence-bin temperatures.
            n_bins: number of confidence bins.
            iters: maximum number of refinement iterations.
            dists: blend softmax outputs with the uniform distribution
                instead of dividing logits by a temperature.
            grid: find the blending weight by grid search (vs closed form).
        """
        super(ModelWithTemperature, self).__init__()
        self.model = model
        self.temperature = 1.0
        self.log = log
        self.const_temp = const_temp
        self.ece_list = []  # ECE recorded after each refinement iteration
        self.ece = 0.0
        self.bins_temp = bins_temp
        self.dists = dists
        self.n_bins = n_bins
        self.iters = iters  # Number of maximum iterations
        # Per-iteration bin edges; rows are overwritten during fitting.
        self.bin_boundaries = torch.linspace(0, 1, n_bins + 1).unsqueeze(0).repeat((iters, 1)).numpy()
        self.best_iter = 0  # Best iteration for scaling
        self.temps_iters = torch.ones(iters).cuda()  # Temperatures for iter single TS
        self.grid = grid  # Applying grid search to find optimal weight
def forward(self, input, labels, const_temp=False, bins_temp=False):
logits = self.model(input)
if self.const_temp or const_temp:
return self.temperature_scale(logits)
#return self.iter_temperature_scale(logits)
elif bins_temp:
return self.bins_temperature_scale_test(logits, labels.cuda(), n_bins=self.n_bins)
elif self.dists:
return self.single_dists_test(logits)
else:
return self.class_temperature_scale(logits)
def temperature_scale(self, logits):
"""
Perform temperature scaling on logits
"""
# Expand temperature to match the size of logits
return logits / self.temperature
def iter_temperature_scale(self, logits):
"""
Perform iterative temperature scaling on logits
"""
scaled_logits = logits.clone()
for i in range(self.iters):
scaled_logits = scaled_logits / self.temps_iters[i]
# Expand temperature to match the size of logits
return scaled_logits
def class_temperature_scale(self, logits):
"""
Perform temperature scaling on logits
"""
# Expand temperature to match the size of logits
return logits / self.csece_temperature
    def bins_temperature_scale_test(self, logits, labels, n_bins=15):
        """
        Apply the fitted per-bin temperatures to test logits.

        For each refinement iteration up to ``self.best_iter``, samples are
        assigned to confidence bins (edges stored in ``self.bin_boundaries``
        during fitting) and each bin's logits are divided by that bin's
        temperature ``self.bins_T[bin, i]``.  Per-bin and overall ECE are
        printed along the way.

        Args:
            logits: uncalibrated logits.
            labels: ground-truth labels (used only for the ECE report).
            n_bins: number of confidence bins.

        Returns:
            The scaled logits.
        """
        ece_criterion = ECELoss(n_bins=n_bins).cuda()
        softmaxes = F.softmax(logits, dim=1)
        confidences, predictions = torch.max(softmaxes, 1)
        accuracies = predictions.eq(labels)
        # confidences[confidences == 1] = 0.999999
        scaled_logits = logits.clone()
        ece_list = []
        for i in range(self.best_iter + 1):
            bin = 0
            bin_lowers = self.bin_boundaries[i][:-1]
            bin_uppers = self.bin_boundaries[i][1:]
            print('\n')
            for bin_lower, bin_upper in zip(bin_lowers, bin_uppers):
                # membership mask: bin_lower < confidence <= bin_upper
                in_bin = confidences.gt(bin_lower.item()) * confidences.le(bin_upper.item())
                prop_in_bin = in_bin.float().mean()
                accuracies_temp = accuracies[in_bin]
                # accuracy_in_bin = accuracies_temp.float().mean().item()
                # clamp the per-bin accuracy into (0.01, 0.99)
                accuracy_in_bin = min(accuracies_temp.float().mean().item(), 0.99)
                accuracy_in_bin = max(accuracy_in_bin, 0.01)
                if any(in_bin):
                    scaled_logits[in_bin] = scaled_logits[in_bin] / self.bins_T[bin, i]
                    softmaxes_temp = F.softmax(scaled_logits[in_bin], dim=1)
                    confidences_temp, _ = torch.max(softmaxes_temp, 1)
                    avg_confidence_in_bin = confidences_temp.mean()
                    after_temperature = torch.abs(accuracy_in_bin - avg_confidence_in_bin)
                    samples = scaled_logits[in_bin].shape[0]
                    print('ece in bin ', bin + 1, ' :', (prop_in_bin * after_temperature).item(),
                          ', number of samples: ', samples)
                    print('accuracy in bin ', bin + 1, ': ', accuracy_in_bin)
                bin += 1
            ece_list.append(ece_criterion(scaled_logits, labels).item())
            # recompute confidences so the next iteration re-bins samples
            softmaxes = F.softmax(scaled_logits, dim=1)
            confidences, _ = torch.max(softmaxes, 1)
        print(ece_list)
        print('Number of iters: {}'.format(self.best_iter + 1))
        return scaled_logits
    def bins_dists_scale_test(self, logits, labels, n_bins=15):
        """
        Apply the fitted per-bin uniform-mixture weights to test softmaxes.

        For each refinement iteration up to ``self.best_iter``, samples are
        assigned to confidence bins and each bin's softmax rows are blended
        with the uniform distribution using that bin's weight
        ``self.bins_T[bin, i]``, then rows are renormalized.  Per-bin and
        overall ECE are printed.

        Returns:
            The calibrated probability rows (NOT logits).
        """
        ece_criterion = ECELoss(n_bins=n_bins).cuda()
        new_softmaxes = F.softmax(logits, dim=1)
        confidences, predictions = torch.max(new_softmaxes, 1)
        accuracies = predictions.eq(labels)
        # confidences[confidences > 0.99999] = 0.99999
        ece_list = []
        n_classes = logits.shape[1]
        for i in range(self.best_iter + 1):
            bin = 0
            bin_lowers = self.bin_boundaries[i][:-1]
            bin_uppers = self.bin_boundaries[i][1:]
            print('\n')
            for bin_lower, bin_upper in zip(bin_lowers, bin_uppers):
                # membership mask: bin_lower < confidence <= bin_upper
                in_bin = confidences.gt(bin_lower.item()) * confidences.le(bin_upper.item())
                prop_in_bin = in_bin.float().mean()
                accuracies_temp = accuracies[in_bin]
                origin_accuracy_in_bin = accuracies_temp.float().mean().item()
                # clamp the per-bin accuracy into (0.01, 0.99) for the report
                accuracy_in_bin = min(accuracies_temp.float().mean().item(), 0.99)
                accuracy_in_bin = max(accuracy_in_bin, 0.01)
                if any(in_bin):
                    weight = self.bins_T[bin, i]
                    # blend with the uniform distribution (1 / n_classes)
                    new_softmaxes[in_bin] = weight * new_softmaxes[in_bin] + (1 - weight) * 1 / n_classes
                    confidences_temp, _ = torch.max(new_softmaxes[in_bin], 1)
                    avg_confidence_in_bin = confidences_temp.mean()
                    after_temperature = torch.abs(accuracy_in_bin - avg_confidence_in_bin)
                    samples = new_softmaxes[in_bin].shape[0]
                    print('ece in bin ', bin + 1, ' :', (prop_in_bin * after_temperature).item(),
                          ', number of samples: ', samples)
                    print('accuracy in bin ', bin + 1, ': ', origin_accuracy_in_bin)
                bin += 1
            # renormalize rows so each sums to one after blending
            new_softmaxes /= torch.sum(new_softmaxes, 1, keepdim=True)
            ece_list.append(ece_criterion(new_softmaxes, labels, is_logits=False).item())
            confidences, _ = torch.max(new_softmaxes, 1)
        print(ece_list)
        print('Number of iters: {}'.format(self.best_iter + 1))
        return new_softmaxes
def single_dists_test(self, logits):
"""
Perform temperature scaling on logits
"""
softmaxes = F.softmax(logits, 1)
# confidence, predictions = torch.max(softmaxes, 1, keepdim=True)
# sorted_conf, sorted_pred = torch.sort(softmaxes, 1, True)
n_classes = logits.shape[1]
# softmaxes = softmaxes - self.weight * dists
softmaxes = self.weight * softmaxes + (1 - self.weight) * 1 / n_classes
# conf = sorted_conf[:, 0]
# second_conf = sorted_conf[:, 1]
# new_values = (1 - self.weight) * conf + self.weight * second_conf
# softmaxes.scatter_(1, predictions, new_values.unsqueeze(-1))
# softmaxes = self.weight * softmaxes
softmaxes /= torch.sum(softmaxes, 1, keepdim=True)
return softmaxes
def bins_temperature_scale(self, logits):
"""
Perform temperature scaling on logits
"""
# Expand temperature to match the size of logits
return logits / torch.unsqueeze(self.bece_temperature, -1)
def set_temperature(self, valid_loader, cross_validate='ece', init_temp=2.5, acc_check=False):
"""
Tune the tempearature of the model (using the validation set) with cross-validation on ECE or NLL
"""
if self.const_temp:
self.cuda()
self.model.eval()
nll_criterion = nn.CrossEntropyLoss().cuda()
ece_criterion = ECELoss().cuda()
# First: collect all the logits and labels for the validation set
logits_list = []
labels_list = []
with torch.no_grad():
for input, label in valid_loader:
input = input.cuda()
logits = self.model(input)
logits_list.append(logits)
labels_list.append(label)
logits = torch.cat(logits_list).cuda()
labels = torch.cat(labels_list).cuda()
# Calculate NLL and ECE before temperature scaling
before_temperature_nll = nll_criterion(logits, labels).item()
before_temperature_ece = ece_criterion(logits, labels).item()
if self.log:
print('Before temperature - NLL: %.3f, ECE: %.3f' % (before_temperature_nll, before_temperature_ece))
nll_val = 10 ** 7
ece_val = 10 ** 7
T_opt_nll = 1.0
T_opt_ece = 1.0
T = 0.1
for i in range(100):
self.temperature = T
self.cuda()
after_temperature_nll = nll_criterion(self.temperature_scale(logits), labels).item()
after_temperature_ece = ece_criterion(self.temperature_scale(logits), labels).item()
if nll_val > after_temperature_nll:
T_opt_nll = T
nll_val = after_temperature_nll
if ece_val > after_temperature_ece:
T_opt_ece = T
ece_val = after_temperature_ece
T += 0.1
if cross_validate == 'ece':
self.temperature = T_opt_ece
else:
self.temperature = T_opt_nll
self.cuda()
# Calculate NLL and ECE after temperature scaling
after_temperature_nll = nll_criterion(self.temperature_scale(logits), labels).item()
after_temperature_ece = ece_criterion(self.temperature_scale(logits), labels).item()
if self.log:
print('Optimal temperature: %.3f' % self.temperature)
print('After temperature - NLL: %.3f, ECE: %.3f' % (after_temperature_nll, after_temperature_ece))
else:
self.cuda()
self.model.eval()
nll_criterion = nn.CrossEntropyLoss().cuda()
ece_criterion = ECELoss().cuda()
csece_criterion = ClassECELoss().cuda()
posneg_csece_criterion = posnegECELoss().cuda()
# First: collect all the logits and labels for the validation set
logits_list = []
labels_list = []
with torch.no_grad():
for input, label in valid_loader:
input = input.cuda()
logits = self.model(input)
logits_list.append(logits)
labels_list.append(label)
logits = torch.cat(logits_list).cuda()
labels = torch.cat(labels_list).cuda()
before_temperature_ece = ece_criterion(logits, labels).item()
if self.log:
print('Before temperature - ECE: %.3f' % (before_temperature_ece))
ece_val = 10 ** 7
T_opt_ece = 1.0
T = 0.1
for i in range(100):
self.temperature = T
self.cuda()
after_temperature_ece = ece_criterion(self.temperature_scale(logits), labels).item()
if ece_val > after_temperature_ece:
T_opt_ece = T
ece_val = after_temperature_ece
T += 0.1
init_temp = T_opt_ece
self.temperature = T_opt_ece
# Calculate NLL and ECE after temperature scaling
after_temperature_ece = ece_criterion(self.temperature_scale(logits), labels).item()
if self.log:
print('Optimal temperature: %.3f' % init_temp)
print('After temperature - ECE: %.3f' % (after_temperature_ece))
"""
Find tempearature vector for the model (using the validation set) with cross-validation on ECE
"""
T_opt_nll = 1.0
T_opt_ece = 1.0
T_opt_csece = init_temp*torch.ones(logits.size()[1]).cuda()
T_csece = init_temp*torch.ones(logits.size()[1]).cuda()
self.csece_temperature = T_csece
self.ece_list.append(ece_criterion(self.class_temperature_scale(logits), labels).item())
_, accuracy, _, _, _ = test_classification_net_logits(logits, labels)
if acc_check:
_, temp_accuracy, _, _, _ = test_classification_net_logits(self.class_temperature_scale(logits), labels)
if temp_accuracy >= accuracy:
accuracy = temp_accuracy
steps_limit = 0.2
temp_steps = torch.linspace(-steps_limit, steps_limit, int((2 * steps_limit) / 0.1 + 1))
converged = False
prev_temperatures = self.csece_temperature.clone()
nll_val = 10 ** 7
ece_val = 10 ** 7
csece_val = 10 ** 7
#for iter in range(self.iters):
while not converged:
for label in range(logits.size()[1]):
init_temp_value = T_csece[label].item()
#T = 0.1
#for i in range(100):
for step in temp_steps:
#T_csece[label] = T
T_csece[label] = init_temp_value + step
self.csece_temperature = T_csece
#self.temperature = T
self.cuda()
#after_temperature_nll = nll_criterion(self.temperature_scale(logits), labels).item()
after_temperature_ece = ece_criterion(self.class_temperature_scale(logits), labels).item()
#after_temperature_ece_reg = ece_criterion(self.temperature_scale(logits), labels).item()
if acc_check:
_, temp_accuracy, _, _, _ = test_classification_net_logits(self.class_temperature_scale(logits), labels)
if acc_check:
if csece_val > after_temperature_ece and temp_accuracy >= accuracy:
T_opt_csece[label] = T
csece_val = after_temperature_ece
accuracy = temp_accuracy
else:
if csece_val > after_temperature_ece:
#T_opt_csece[label] = T
T_opt_csece[label] = init_temp_value + step
csece_val = after_temperature_ece
#T += 0.1
T_csece[label] = T_opt_csece[label]
self.csece_temperature = T_opt_csece
self.ece_list.append(ece_criterion(self.class_temperature_scale(logits), labels).item())
converged = torch.all(self.csece_temperature.eq(prev_temperatures))
prev_temperatures = self.csece_temperature.clone()
self.csece_temperature = T_opt_csece
self.cuda()
return self
def get_temperature(self):
if self.const_temp:
return self.temperature
elif self.bins_temp:
return self.temperature, self.bins_T
elif self.dists:
return self.weight
else:
return self.temperature, self.csece_temperature
    def set_bins_temperature(self, valid_loader, cross_validate='ece', init_temp=2.5, acc_check=False, n_bins=15):
        """
        Tune the temperature of the model (using the validation set) with
        cross-validation on ECE or NLL.

        A scalar grid search first finds the best single temperature; it
        then seeds a per-sample temperature vector (``self.bece_temperature``,
        one entry per validation sample) that is refined bin-by-bin with a
        local step search until the vector stops changing.  A per-bin
        summary is kept in ``self.bins_T``.

        Args:
            valid_loader (DataLoader): validation set loader.
            cross_validate (str): unused here; the scalar search uses ECE.
            init_temp (float): overwritten by the scalar grid-search optimum.
            acc_check (bool): if True, only accept a candidate when it does
                not reduce validation accuracy.
            n_bins: number of equally spaced confidence bins.

        Returns:
            self
        """
        self.cuda()
        self.model.eval()
        nll_criterion = nn.CrossEntropyLoss().cuda()
        ece_criterion = ECELoss().cuda()
        # First: collect all the logits and labels for the validation set
        logits_list = []
        labels_list = []
        with torch.no_grad():
            for input, label in valid_loader:
                input = input.cuda()
                logits = self.model(input)
                logits_list.append(logits)
                labels_list.append(label)
            logits = torch.cat(logits_list).cuda()
            labels = torch.cat(labels_list).cuda()
        # Calculate NLL and ECE before temperature scaling
        before_temperature_nll = nll_criterion(logits, labels).item()
        before_temperature_ece = ece_criterion(logits, labels).item()
        if self.log:
            print('Before temperature - NLL: %.3f, ECE: %.3f' % (before_temperature_nll, before_temperature_ece))
        eps = 1e-6
        # scalar grid search over T in {0.1, ..., 10.0} on ECE
        ece_val = 10 ** 7
        T_opt_ece = 1.0
        T = 0.1
        for i in range(100):
            self.temperature = T
            self.cuda()
            after_temperature_ece = ece_criterion(self.temperature_scale(logits), labels).item()
            if ece_val > after_temperature_ece:
                T_opt_ece = T
                ece_val = after_temperature_ece
            T += 0.1
        init_temp = T_opt_ece
        self.temperature = T_opt_ece
        # Calculate NLL and ECE after temperature scaling
        after_temperature_ece = ece_criterion(self.temperature_scale(logits), labels).item()
        if self.log:
            print('Optimal temperature: %.3f' % init_temp)
            print('After temperature - ECE: %.3f' % (after_temperature_ece))
        # per-sample temperature vectors seeded with the scalar optimum
        T_opt_bece = init_temp*torch.ones(logits.shape[0]).cuda()
        T_bece = init_temp*torch.ones(logits.shape[0]).cuda()
        self.bins_T = init_temp*torch.ones(n_bins).cuda()
        #bins_T_opt = init_temp*torch.ones(n_bins).cuda()
        self.bece_temperature = T_bece
        self.ece_list.append(ece_criterion(self.temperature_scale(logits), labels).item())
        _, accuracy, _, _, _ = test_classification_net_logits(logits, labels)
        if acc_check:
            _, temp_accuracy, _, _, _ = test_classification_net_logits(self.temperature_scale(logits), labels)
            if temp_accuracy >= accuracy:
                accuracy = temp_accuracy
        softmaxes = F.softmax(logits, dim=1)
        confidences, _ = torch.max(softmaxes, 1)
        steps_limit = 0.2
        # candidate offsets: -0.2, -0.1, 0.0, 0.1, 0.2
        temp_steps = torch.linspace(-steps_limit, steps_limit, int((2 * steps_limit) / 0.1 + 1)).cuda()
        converged = False
        prev_temperatures = self.bece_temperature.clone()
        #prev_temperatures = self.bins_T.clone()
        bece_val = 10 ** 7
        bin_boundaries = torch.linspace(0, 1, n_bins + 1)
        bin_lowers = bin_boundaries[:-1]
        bin_uppers = bin_boundaries[1:]
        self.iters = 0
        while not converged:
            self.iters += 1
            bin = 0
            for bin_lower, bin_upper in zip(bin_lowers, bin_uppers):
                # membership mask: bin_lower < confidence <= bin_upper
                in_bin = confidences.gt(bin_lower.item()) * confidences.le(bin_upper.item())
                #prop_in_bin = in_bin.float().mean()
                if any(in_bin):
                    init_temp_value = T_bece[in_bin][0].item()
                    #init_temp_value = self.bins_T[bin].item()
                    for step in temp_steps:
                        T_bece[in_bin] = init_temp_value + step
                        self.bece_temperature = T_bece
                        #self.bins_T[bin] = init_temp_value + step
                        self.cuda()
                        after_temperature_ece = ece_criterion(self.bins_temperature_scale(logits), labels).item()
                        if acc_check:
                            _, temp_accuracy, _, _, _ = test_classification_net_logits(self.bins_temperature_scale(logits), labels)
                        if acc_check:
                            if bece_val > after_temperature_ece + eps and temp_accuracy >= accuracy:
                                T_opt_bece[in_bin] = init_temp_value + step
                                #bins_T_opt[bin] = init_temp_value + step
                                bece_val = after_temperature_ece
                                accuracy = temp_accuracy
                        else:
                            if bece_val > after_temperature_ece + eps:
                                T_opt_bece[in_bin] = init_temp_value + step
                                #bins_T_opt[bin] = init_temp_value + step
                                bece_val = after_temperature_ece
                    T_bece[in_bin] = T_opt_bece[in_bin]
                    #self.bins_T[bin] = bins_T_opt[bin]
                    self.bins_T[bin] = T_bece[in_bin][0].item()
                bin += 1
            self.bece_temperature = T_opt_bece
            #self.bins_T = bins_T_opt
            self.ece_list.append(ece_criterion(self.bins_temperature_scale(logits), labels).item())
            # converged when a full sweep leaves the vector unchanged
            converged = torch.all(self.bece_temperature.eq(prev_temperatures))
            prev_temperatures = self.bece_temperature.clone()
        self.bece_temperature = T_opt_bece
        #self.bins_T = bins_T_opt
        self.cuda()
        return self
def histedges_equalN(self, x):
npt = len(x)
return np.interp(np.linspace(0, npt, self.n_bins + 1),
np.arange(npt),
np.sort(x))
    def set_bins_temperature2(self, valid_loader, cross_validate='ece', init_temp=2.5, acc_check=False, top_temp=10):
        """
        Tune the temperature of the model (using the validation set) with
        cross-validation on ECE or NLL.

        After a scalar grid search, per-sample temperatures are fitted
        iteratively: in each iteration samples are split into equal-mass
        confidence bins, each bin's temperature is grid-searched to match
        the bin's confidence to its (clamped) accuracy, and bins with fewer
        than 20 samples inherit temperatures from their nearest populated
        neighbors.  Iterations stop early when the overall ECE stops
        improving; ``self.best_iter`` records the best iteration.

        Args:
            valid_loader (DataLoader): validation set loader.
            cross_validate, init_temp, acc_check, top_temp: kept for
                interface compatibility; only parts are used here.

        Returns:
            self
        """
        self.cuda()
        self.model.eval()
        nll_criterion = nn.CrossEntropyLoss().cuda()
        ece_criterion = ECELoss().cuda()
        # First: collect all the logits and labels for the validation set
        logits_list = []
        labels_list = []
        with torch.no_grad():
            for input, label in valid_loader:
                input = input.cuda()
                logits = self.model(input)
                logits_list.append(logits)
                labels_list.append(label)
            logits = torch.cat(logits_list).cuda()
            labels = torch.cat(labels_list).cuda()
        # Calculate NLL and ECE before temperature scaling
        before_temperature_nll = nll_criterion(logits, labels).item()
        before_temperature_ece = ece_criterion(logits, labels).item()
        if self.log:
            print('Before temperature - NLL: %.3f, ECE: %.3f' % (before_temperature_nll, before_temperature_ece))
        n_bins = self.n_bins
        eps = 1e-6
        # scalar grid search over T in {0.1, ..., 10.0} on ECE
        ece_val = 10 ** 7
        T_opt_ece = 1.0
        T = 0.1
        for i in range(100):
            self.temperature = T
            self.cuda()
            after_temperature_ece = ece_criterion(self.temperature_scale(logits), labels).item()
            if ece_val > after_temperature_ece:
                T_opt_ece = T
                ece_val = after_temperature_ece
            T += 0.1
        init_temp = T_opt_ece
        self.temperature = T_opt_ece
        # Calculate NLL and ECE after temperature scaling
        after_temperature_ece = ece_criterion(self.temperature_scale(logits), labels).item()
        if self.log:
            print('Optimal temperature: %.3f' % init_temp)
            print('After temperature - ECE: %.3f' % (after_temperature_ece))
        # per-sample temperatures start at 1 (NOT the scalar optimum)
        init_temp = 1
        T_opt_bece = init_temp*torch.ones(logits.shape[0]).cuda()
        T_bece = init_temp*torch.ones(logits.shape[0]).cuda()
        self.bins_T = init_temp*torch.ones((n_bins, self.iters)).cuda()
        self.bece_temperature = T_bece
        self.ece_list.append(ece_criterion(self.temperature_scale(logits), labels).item())
        softmaxes = F.softmax(logits, dim=1)
        confidences, predictions = torch.max(softmaxes, 1)
        accuracies = predictions.eq(labels)
        for i in range(self.iters):
            ece_in_iter = 0
            print('iter num ', i+1)
            bin = 0
            few_examples = dict()
            # equal-mass bin edges for this iteration's confidences
            n, self.bin_boundaries[i] = np.histogram(confidences.cpu().detach(), self.histedges_equalN(confidences.cpu().detach()))
            bin_lowers = self.bin_boundaries[i][:-1]
            bin_uppers = self.bin_boundaries[i][1:]
            for bin_lower, bin_upper in zip(bin_lowers, bin_uppers):
                bece_val = 10 ** 7
                in_bin = confidences.gt(bin_lower.item()) * confidences.le(bin_upper.item())
                prop_in_bin = in_bin.float().mean()
                if confidences[in_bin].shape[0] < 20:
                    # too few samples to fit a temperature; fill in later
                    samples = T_bece[in_bin].shape[0]
                    print('number of samples in bin {0}: {1}'.format(bin + 1, samples))
                    few_examples[bin] = samples
                    bin += 1
                    continue
                if any(in_bin):
                    init_temp_value = T_bece[in_bin][0].item()
                    T = 0.1
                    accuracies_temp = accuracies[in_bin]
                    # clamp the per-bin accuracy target into (0.01, 0.99)
                    accuracy_in_bin = min(accuracies_temp.float().mean().item(), 0.99)
                    accuracy_in_bin = max(accuracy_in_bin, 0.01)
                    # grid search the bin temperature to match confidence to accuracy
                    for t in range(100):
                        T_bece[in_bin] = T
                        self.bece_temperature = T_bece
                        softmaxes_temp = F.softmax(logits[in_bin] / torch.unsqueeze(T_bece[in_bin], -1), dim=1)
                        confidences_temp, _ = torch.max(softmaxes_temp, 1)
                        avg_confidence_in_bin = confidences_temp.mean()
                        after_temperature = torch.abs(accuracy_in_bin - avg_confidence_in_bin)
                        if bece_val > after_temperature + eps:
                            T_opt_bece[in_bin] = T
                            bece_val = after_temperature
                        #print('conf-acc: ', (avg_confidence_in_bin - accuracy_in_bin).item())
                        #print('temp: ', T)
                        T += 0.1
                    T_bece[in_bin] = T_opt_bece[in_bin]
                    self.bins_T[bin, i] = T_opt_bece[in_bin][0].item()
                    samples = T_bece[in_bin].shape[0]
                    ece_in_iter += prop_in_bin * bece_val
                    print('ece in bin ', bin+1, ' :', (prop_in_bin * bece_val).item(), ', number of samples: ', samples)
                bin += 1
            # fill under-populated bins from their nearest fitted neighbors
            for bin in few_examples:
                #bins_T[bin, i] = self.temperature
                if bin > 0 and bin < n_bins - 1:
                    lower_bin = bin - 1
                    upper_bin = bin + 1
                    while lower_bin in few_examples and lower_bin - 1 >= 0:
                        lower_bin -= 1
                    while upper_bin in few_examples and upper_bin + 1 <= n_bins - 1:
                        upper_bin += 1
                    if upper_bin == n_bins - 1:
                        self.bins_T[bin, i] = self.bins_T[lower_bin, i]
                    else:
                        avg_temp = (self.bins_T[lower_bin, i] + self.bins_T[upper_bin, i]) / 2  # Mean temperature of neighbors
                        self.bins_T[bin, i] = avg_temp
                elif bin == 0:
                    upper_bin = bin + 1
                    while upper_bin in few_examples and upper_bin + 1 <= n_bins - 1:
                        upper_bin += 1
                    self.bins_T[bin, i] = self.bins_T[upper_bin, i]
                else:
                    lower_bin = bin - 1
                    while lower_bin in few_examples and lower_bin - 1 >= 0:
                        lower_bin -= 1
                    self.bins_T[bin, i] = self.bins_T[lower_bin, i]
            self.bece_temperature = T_opt_bece
            current_ece = ece_criterion(self.bins_temperature_scale(logits), labels).item()
            print('ece in iter ', i + 1, ' :', current_ece)
            if i > 0 and current_ece < self.ece_list[self.best_iter]:
                self.best_iter = i
            if abs(self.ece_list[-1] - current_ece) > eps:
                self.ece_list.append(current_ece)
            else:
                # ECE plateaued: stop and record the effective iteration count
                self.iters = i + 1
                break
            # apply this iteration's temperatures before re-binning
            logits = logits / torch.unsqueeze(self.bece_temperature, -1)
            softmaxes = F.softmax(logits, dim=1)
            confidences, predictions = torch.max(softmaxes, 1)
        self.bece_temperature = T_opt_bece
        return self
    def set_bins_dists(self, valid_loader, cross_validate='ece', init_temp=2.5, acc_check=False, top_temp=10):
        """
        Tune the temperature of the model (using the validation set) with
        cross-validation on ECE or NLL.

        Variant of ``set_bins_temperature2`` that, instead of dividing
        logits by a temperature, blends each confidence bin's softmax rows
        with the uniform distribution.  The per-bin weight is found either
        by grid search (``self.grid``) or in closed form from the bin's
        confidence and accuracy.  Under-populated bins (< 20 samples)
        inherit weights from their nearest fitted neighbors.

        Args:
            valid_loader (DataLoader): validation set loader.
            cross_validate, init_temp, acc_check, top_temp: kept for
                interface compatibility; only parts are used here.

        Returns:
            self
        """
        self.cuda()
        self.model.eval()
        nll_criterion = nn.CrossEntropyLoss().cuda()
        ece_criterion = ECELoss().cuda()
        # First: collect all the logits and labels for the validation set
        logits_list = []
        labels_list = []
        with torch.no_grad():
            for input, label in valid_loader:
                input = input.cuda()
                logits = self.model(input)
                logits_list.append(logits)
                labels_list.append(label)
            logits = torch.cat(logits_list).cuda()
            labels = torch.cat(labels_list).cuda()
        # Calculate NLL and ECE before temperature scaling
        before_temperature_nll = nll_criterion(logits, labels).item()
        before_temperature_ece = ece_criterion(logits, labels).item()
        if self.log:
            print('Before temperature - NLL: %.3f, ECE: %.3f' % (before_temperature_nll, before_temperature_ece))
        n_bins = self.n_bins
        eps = 1e-6
        # scalar grid search over T in {0.1, ..., 10.0} on ECE
        ece_val = 10 ** 7
        T_opt_ece = 1.0
        T = 0.1
        for i in range(100):
            self.temperature = T
            self.cuda()
            after_temperature_ece = ece_criterion(self.temperature_scale(logits), labels).item()
            if ece_val > after_temperature_ece:
                T_opt_ece = T
                ece_val = after_temperature_ece
            T += 0.1
        init_temp = T_opt_ece
        self.temperature = T_opt_ece
        # Calculate NLL and ECE after temperature scaling
        after_temperature_ece = ece_criterion(self.temperature_scale(logits), labels).item()
        if self.log:
            print('Optimal temperature: %.3f' % init_temp)
            print('After temperature - ECE: %.3f' % (after_temperature_ece))
        # per-sample weights start at 1 (pure softmax, no uniform mass)
        init_temp = 1
        T_opt_bece = init_temp*torch.ones(logits.shape[0]).cuda()
        T_bece = init_temp*torch.ones(logits.shape[0]).cuda()
        self.bins_T = init_temp*torch.ones((n_bins, self.iters)).cuda()
        self.bece_temperature = T_bece
        self.ece_list.append(ece_criterion(self.temperature_scale(logits), labels).item())
        softmaxes = F.softmax(logits, dim=1)
        confidences, predictions = torch.max(softmaxes, 1)
        # confidences[confidences > 0.99999] = 0.99999
        accuracies = predictions.eq(labels)
        n_classes = logits.shape[1]
        for i in range(self.iters):
            ece_in_iter = 0
            print('iter num ', i+1)
            bin = 0
            few_examples = dict()
            # equal-mass bin edges for this iteration's confidences
            n, self.bin_boundaries[i] = np.histogram(confidences.cpu().detach(), self.histedges_equalN(confidences.cpu().detach()))
            bin_lowers = self.bin_boundaries[i][:-1]
            bin_uppers = self.bin_boundaries[i][1:]
            for bin_lower, bin_upper in zip(bin_lowers, bin_uppers):
                bece_val = 10 ** 7
                in_bin = confidences.gt(bin_lower.item()) * confidences.le(bin_upper.item())
                prop_in_bin = in_bin.float().mean()
                if confidences[in_bin].shape[0] < 20:
                    # too few samples to fit a weight; fill in later
                    samples = T_bece[in_bin].shape[0]
                    print('number of samples in bin {0}: {1}'.format(bin + 1, samples))
                    few_examples[bin] = samples
                    bin += 1
                    continue
                if any(in_bin):
                    init_temp_value = T_bece[in_bin][0].item()
                    origin_avg_confidence_in_bin = confidences[in_bin].mean()
                    origin_accuracy_in_bin = accuracies[in_bin].float().mean().item()
                    # clamp the per-bin accuracy target into (0.01, 0.99)
                    accuracy_in_bin = min(origin_accuracy_in_bin, 0.99)
                    accuracy_in_bin = max(accuracy_in_bin, 0.01)
                    if self.grid:
                        # grid search the mixture weight in {0.1, ..., 10.0}
                        T = 0.1
                        for t in range(100):
                            temp_softmaxes = softmaxes.clone()
                            T_bece[in_bin] = T
                            temp_softmaxes[in_bin] = T * temp_softmaxes[in_bin] + (1 - T) * 1 / n_classes
                            temp_softmaxes /= torch.sum(temp_softmaxes, 1, keepdim=True)
                            confidences_temp, _ = torch.max(temp_softmaxes[in_bin], 1)
                            self.bece_temperature = T_bece
                            avg_confidence_in_bin = confidences_temp.mean()
                            after_temperature = torch.abs(accuracy_in_bin - avg_confidence_in_bin)
                            if bece_val > after_temperature + eps:
                                T_opt_bece[in_bin] = T
                                bece_val = after_temperature
                            #print('conf-acc: ', (avg_confidence_in_bin - accuracy_in_bin).item())
                            #print('temp: ', T)
                            T += 0.1
                    else:
                        # closed-form weight, clipped to [0, 1]
                        weight_i = (origin_accuracy_in_bin - 1/n_classes) / (origin_avg_confidence_in_bin - 1/n_classes)
                        weight_i = max(0, min(1, weight_i.item()))
                        T_opt_bece[in_bin] = weight_i
                        # Compute bin's updated ECE
                        temp_softmaxes = softmaxes.clone()
                        temp_softmaxes[in_bin] = weight_i * temp_softmaxes[in_bin] + (1 - weight_i) * 1 / n_classes
                        temp_softmaxes /= torch.sum(temp_softmaxes, 1, keepdim=True)
                        confidences_temp, _ = torch.max(temp_softmaxes[in_bin], 1)
                        avg_confidence_in_bin = confidences_temp.mean()
                        bece_val = torch.abs(origin_accuracy_in_bin - avg_confidence_in_bin)
                    T_bece[in_bin] = T_opt_bece[in_bin]
                    self.bins_T[bin, i] = T_opt_bece[in_bin][0].item()
                    samples = T_bece[in_bin].shape[0]
                    ece_in_iter += prop_in_bin * bece_val
                    print('ece in bin ', bin+1, ' :', (prop_in_bin * bece_val).item(), ', number of samples: ', samples)
                bin += 1
            # fill under-populated bins from their nearest fitted neighbors
            for bin in few_examples:
                #bins_T[bin, i] = self.temperature
                if bin > 0 and bin < n_bins - 1:
                    lower_bin = bin - 1
                    upper_bin = bin + 1
                    while lower_bin in few_examples and lower_bin - 1 >= 0:
                        lower_bin -= 1
                    while upper_bin in few_examples and upper_bin + 1 <= n_bins - 1:
                        upper_bin += 1
                    if upper_bin == n_bins - 1:
                        self.bins_T[bin, i] = self.bins_T[lower_bin, i]
                    else:
                        avg_temp = (self.bins_T[lower_bin, i] + self.bins_T[upper_bin, i]) / 2  # Mean temperature of neighbors
                        self.bins_T[bin, i] = avg_temp
                elif bin == 0:
                    upper_bin = bin + 1
                    while upper_bin in few_examples and upper_bin + 1 <= n_bins - 1:
                        upper_bin += 1
                    self.bins_T[bin, i] = self.bins_T[upper_bin, i]
                else:
                    lower_bin = bin - 1
                    while lower_bin in few_examples and lower_bin - 1 >= 0:
                        lower_bin -= 1
                    self.bins_T[bin, i] = self.bins_T[lower_bin, i]
            self.bece_temperature = T_opt_bece
            current_ece = ece_criterion(self.bins_temperature_scale(logits), labels).item()
            print('ece in iter ', i + 1, ' :', current_ece)
            if i > 0 and current_ece < self.ece_list[self.best_iter]:
                self.best_iter = i
            if abs(self.ece_list[-1] - current_ece) > eps:
                self.ece_list.append(current_ece)
            else:
                # ECE plateaued: stop and record the effective iteration count
                self.iters = i + 1
                break
            # apply this iteration's weights before re-binning
            weight = self.bece_temperature.unsqueeze(-1).repeat(1, logits.shape[1])
            softmaxes = weight * softmaxes + (1 - weight) * 1 / n_classes
            confidences, predictions = torch.max(softmaxes, 1)
        self.bece_temperature = T_opt_bece
        return self
    def set_single_dists(self, val_loader, log=True):
        """
        Tune the temperature of the model (using the validation set) with
        cross-validation on ECE without bins.

        First finds the best scalar temperature by grid search (report
        only), then fits a single uniform-mixture weight ``self.weight``:
        either in closed form from binned confidence/accuracy statistics
        (``self.grid`` False) or by grid search over [0.001, 1.0] on ECE.

        Args:
            val_loader (DataLoader): validation set loader.
            log: if True, print before/after calibration metrics.

        Returns:
            self
        """
        ece_criterion = ECELoss().cuda()
        # First: collect all the logits and labels for the validation set
        logits_list = []
        labels_list = []
        with torch.no_grad():
            for input, label in val_loader:
                input = input.cuda()
                logits = self.model(input)
                logits_list.append(logits)
                labels_list.append(label)
            logits = torch.cat(logits_list).cuda()
            labels = torch.cat(labels_list).cuda()
        # Calculate ECE before temperature scaling
        before_temperature_ece = ece_criterion(logits, labels).item()
        if log:
            print('Before temperature - ECE: %.3f' % before_temperature_ece)
        # scalar grid search over T in {0.1, ..., 10.0} on ECE (report only)
        ece_val = 10 ** 7
        T_opt_ece = 1.0
        T = 0.1
        for i in range(100):
            self.temperature = T
            self.cuda()
            after_temperature_ece = ece_criterion(self.temperature_scale(logits), labels).item()
            if ece_val > after_temperature_ece:
                T_opt_ece = T
                ece_val = after_temperature_ece
            T += 0.1
        init_temp = T_opt_ece
        self.temperature = T_opt_ece
        # Calculate NLL and ECE after temperature scaling
        after_temperature_ece = ece_criterion(self.temperature_scale(logits), labels).item()
        if self.log:
            print('Optimal temperature: %.3f' % init_temp)
            print('After temperature - ECE: %.3f' % (after_temperature_ece))
        softmaxes = F.softmax(logits, 1)
        n_classes = logits.shape[1]
        eps = 1e-6
        ece_val = 10 ** 7
        T_opt_ece = 1.0
        if not self.grid:
            # closed-form weight: least-squares fit of accuracy against
            # confidence over equal-mass bins (both centered at 1/n_classes)
            confidences, predictions = torch.max(softmaxes, 1)
            accuracies = predictions.eq(labels)
            n_bins = 15
            # NOTE(review): this calls a module-level histedges_equalN(...) with
            # an n_bins kwarg; self.histedges_equalN takes no such argument.  A
            # module-level helper is assumed to exist elsewhere in this file —
            # verify before relying on this branch.
            n, bin_boundaries = np.histogram(confidences.cpu().detach(), histedges_equalN(confidences.cpu().detach(), n_bins=n_bins))
            bin_lowers = bin_boundaries[:-1]
            bin_uppers = bin_boundaries[1:]
            numerator = []
            dominator = []
            for bin_lower, bin_upper in zip(bin_lowers, bin_uppers):
                in_bin = confidences.gt(bin_lower.item()) * confidences.le(bin_upper.item())
                if any(in_bin):
                    accuracies_temp = accuracies[in_bin]
                    accuracy_in_bin = accuracies_temp.float().mean().item()
                    confidence_in_bin = confidences[in_bin].mean()
                    numerator.append((confidence_in_bin - 1 / n_classes) * (accuracy_in_bin - 1 / n_classes))
                    dominator.append((confidence_in_bin - 1 / n_classes) * (confidence_in_bin - 1 / n_classes))
            numerator = torch.Tensor(numerator)
            dominator = torch.Tensor(dominator)
            self.weight = torch.sum(numerator) / torch.sum(dominator)
        else:
            # grid search the mixture weight over {0.001, ..., 1.0} on ECE
            T = 0.001
            for i in range(1000):
                temp_softmaxes = softmaxes.clone()
                self.weight = T
                temp_softmaxes = T * temp_softmaxes + (1 - T) * 1 / n_classes
                temp_softmaxes /= torch.sum(temp_softmaxes, 1, keepdim=True)
                after_temperature_ece = ece_criterion(temp_softmaxes, labels, is_logits=False).item()
                if ece_val > after_temperature_ece + eps:
                    T_opt_ece = T
                    ece_val = after_temperature_ece
                T += 0.001
            self.weight = T_opt_ece
        # apply the fitted weight and renormalize for the final report
        softmaxes = self.weight * softmaxes + (1 - self.weight) * 1 / n_classes
        softmaxes /= torch.sum(softmaxes, 1, keepdim=True)
        # Calculate ECE after temperature scaling
        after_temperature_ece = ece_criterion(softmaxes, labels, is_logits=False).item()
        if log:
            print('Optimal weight: %.3f' % self.weight)
            print('After temperature - ECE: %.3f' % after_temperature_ece)
        return self
def temperature_scale2(logits, temperature):
    """Return temperature-scaled logits, i.e. ``logits / temperature``.

    A single scalar temperature is broadcast over every logit; T > 1
    flattens the resulting softmax, T < 1 sharpens it.
    """
    scaled_logits = logits / temperature
    return scaled_logits
def class_temperature_scale2(logits, csece_temperature):
    """Scale logits with a per-class temperature vector.

    ``csece_temperature`` holds one temperature per class; broadcasting
    divides every column of ``logits`` by its class temperature.
    """
    per_class_scaled = logits / csece_temperature
    return per_class_scaled
def set_temperature2(logits, labels, iters=1, cross_validate='ece',
                     init_temp=2.5, acc_check=False, const_temp=False, log=True, num_bins=25):
    """
    Tune the temperature of the model (using the validation set) with cross-validation on ECE or NLL.

    When ``const_temp`` is True, a single scalar temperature is grid-searched
    (T = 0.1 .. 10.0 in steps of 0.1) and the value minimising ECE or NLL
    (selected by ``cross_validate``) is returned.

    Otherwise a single temperature is first tuned on ECE (stored in
    ``init_temp``), and a per-class temperature vector is then refined by
    coordinate descent over the classes for ``iters`` rounds.

    Parameters
    ----------
    logits : torch.Tensor
        Validation logits of shape (N, C).
    labels : torch.Tensor
        Validation labels of shape (N,).
    iters : int
        Number of coordinate-descent rounds over the classes.
    cross_validate : str
        'ece' selects on ECE; anything else selects on NLL (const_temp path).
    init_temp : float
        Initial value for the per-class temperatures.
    acc_check : bool
        If True, only accept per-class updates that do not reduce accuracy.
    const_temp : bool
        If True, tune and return a single scalar temperature.
    log : bool
        Print progress information.
    num_bins : int
        Number of bins for the ECE criterion (non-const_temp path).

    Returns
    -------
    temperature : float
        When ``const_temp`` is True.
    (csece_temperature, init_temp) : (torch.Tensor, float)
        Otherwise: per-class temperature vector and the tuned scalar.
    """
    if const_temp:
        nll_criterion = nn.CrossEntropyLoss().cuda()
        ece_criterion = ECELoss().cuda()
        # Calculate NLL and ECE before temperature scaling
        before_temperature_nll = nll_criterion(logits, labels).item()
        before_temperature_ece = ece_criterion(logits, labels).item()
        if log:
            print('Before temperature - NLL: %.3f, ECE: %.3f' % (before_temperature_nll, before_temperature_ece))
        nll_val = 10 ** 7
        ece_val = 10 ** 7
        T_opt_nll = 1.0
        T_opt_ece = 1.0
        T = 0.1
        # Grid search T in [0.1, 10.0], tracking the best value per metric.
        for i in range(100):
            temperature = T
            after_temperature_nll = nll_criterion(temperature_scale2(logits, temperature), labels).item()
            after_temperature_ece = ece_criterion(temperature_scale2(logits, temperature), labels).item()
            if nll_val > after_temperature_nll:
                T_opt_nll = T
                nll_val = after_temperature_nll
            if ece_val > after_temperature_ece:
                T_opt_ece = T
                ece_val = after_temperature_ece
            T += 0.1
        if cross_validate == 'ece':
            temperature = T_opt_ece
        else:
            temperature = T_opt_nll
        # Calculate NLL and ECE after temperature scaling
        after_temperature_nll = nll_criterion(temperature_scale2(logits, temperature), labels).item()
        after_temperature_ece = ece_criterion(temperature_scale2(logits, temperature), labels).item()
        if log:
            print('Optimal temperature: %.3f' % temperature)
            print('After temperature - NLL: %.3f, ECE: %.3f' % (after_temperature_nll, after_temperature_ece))
    else:
        # --- Stage 1: tune a single temperature on ECE. ---
        ece_criterion = ECELoss(n_bins=num_bins).cuda()
        before_temperature_ece = ece_criterion(logits, labels).item()
        if log:
            print('Before temperature - ECE: %.3f' % (before_temperature_ece))
        ece_val = 10 ** 7
        T_opt_ece = 1.0
        T = 0.1
        for i in range(100):
            after_temperature_ece = ece_criterion(temperature_scale2(logits, T), labels).item()
            if ece_val > after_temperature_ece:
                T_opt_ece = T
                ece_val = after_temperature_ece
            T += 0.1
        init_temp = T_opt_ece
        # Calculate ECE after single-temperature scaling
        after_temperature_ece = ece_criterion(temperature_scale2(logits, init_temp), labels).item()
        if log:
            print('Optimal temperature: %.3f' % init_temp)
            print('After temperature - ECE: %.3f' % (after_temperature_ece))
        # --- Stage 2: coordinate descent on a per-class temperature vector. ---
        ece_list = []
        before_temperature_ece = ece_criterion(logits, labels).item()
        if acc_check:
            _, accuracy, _, _, _ = test_classification_net_logits(logits, labels)
        if log:
            print('Before temperature - ECE: {0:.3f}'.format(before_temperature_ece))
        T_opt_csece = init_temp * torch.ones(logits.size()[1]).cuda()
        T_csece = init_temp * torch.ones(logits.size()[1]).cuda()
        csece_temperature = T_csece
        ece_list.append(ece_criterion(class_temperature_scale2(logits, csece_temperature), labels).item())
        if acc_check:
            _, temp_accuracy, _, _, _ = test_classification_net_logits(class_temperature_scale2(logits, csece_temperature), labels)
            if temp_accuracy >= accuracy:
                accuracy = temp_accuracy
        # Best overall ECE seen so far across all coordinate updates.
        csece_val = 10 ** 7
        for iter in range(iters):
            print('Started iter ' + str(iter))
            for label in range(logits.size()[1]):
                # Grid-search this class's temperature while all other
                # class temperatures stay fixed at their current optimum.
                T = 0.1
                for i in range(100):
                    T_csece[label] = T
                    csece_temperature = T_csece
                    after_temperature_ece = ece_criterion(class_temperature_scale2(logits, csece_temperature), labels).item()
                    if acc_check:
                        _, temp_accuracy, _, _, _ = test_classification_net_logits(class_temperature_scale2(logits, csece_temperature), labels)
                        if csece_val > after_temperature_ece and temp_accuracy >= accuracy:
                            T_opt_csece[label] = T
                            csece_val = after_temperature_ece
                            accuracy = temp_accuracy
                    else:
                        if csece_val > after_temperature_ece:
                            T_opt_csece[label] = T
                            csece_val = after_temperature_ece
                    T += 0.1
                # Restore the accepted optimum before moving to the next class.
                T_csece[label] = T_opt_csece[label]
            # (Removed per-class debug prints: they referenced undefined
            # names — confidences/in_bin/prop_in_bin/samples — and raised
            # NameError at runtime.)
            csece_temperature = T_opt_csece
            ece_list.append(ece_criterion(class_temperature_scale2(logits, csece_temperature), labels).item())
        csece_temperature = T_opt_csece
    if const_temp:
        return temperature
    else:
        return csece_temperature, init_temp
def bins_temperature_scale2(logits, bece_temperature):
    """Scale each sample's logits by its own (per-bin) temperature.

    ``bece_temperature`` holds one temperature per sample; a trailing axis
    is added so it broadcasts across the class dimension.
    """
    per_sample_T = torch.unsqueeze(bece_temperature, -1)
    return logits / per_sample_T
def histedges_equalN(x, n_bins=15):
    """Return ``n_bins + 1`` bin edges that split *x* into equal-count bins.

    The edges are read off the empirical CDF: the sorted samples are
    linearly interpolated at ``n_bins + 1`` evenly spaced ranks.
    """
    n_samples = len(x)
    ranks = np.linspace(0, n_samples, n_bins + 1)
    return np.interp(ranks, np.arange(n_samples), np.sort(x))
def equal_bins(x, n_bins=15):
    """Compute equal-count bin boundaries for the samples in *x*.

    Returns ``(bin_boundaries, many_samples)``: ``bin_boundaries`` has
    ``n_bins + 1`` entries whose first entry is the smallest sample and
    whose last entry is 1.0; ``many_samples`` maps every sample value whose
    multiplicity exceeds the nominal bin size to its count.
    """
    bin_size = int(x.shape[0] / n_bins)
    sorted_samples = np.sort(x)
    # Record sample values that repeat more often than one bin can hold.
    unique_samples, counts = np.unique(sorted_samples, return_counts=True)
    many_samples = {sample: count for sample, count in zip(unique_samples, counts)
                    if count > bin_size}
    bin_boundaries = np.zeros(n_bins + 1)
    bin_boundaries[0] = sorted_samples[0]
    bin_boundaries[-1] = 1.0
    # Every bin_size-th sorted sample becomes the next interior boundary.
    next_edge = 1
    for position, sample in enumerate(sorted_samples):
        if position == bin_size * next_edge:
            bin_boundaries[next_edge] = sample
            next_edge += 1
    return bin_boundaries, many_samples
def bin_ece(logits, accuracies, in_bin, is_logits=True):
    """Compute the calibration gap contributed by one confidence bin.

    *logits* are the (possibly already scaled) scores of the samples in the
    bin; *accuracies* is the full per-sample correctness vector and
    *in_bin* the full boolean mask selecting this bin.  When ``is_logits``
    is False, *logits* is treated as probabilities directly.

    Returns ``(ece, samples, origin_accuracy_in_bin, avg_confidence_in_bin)``.
    """
    origin_accuracy_in_bin = accuracies[in_bin].float().mean().item()
    # Clamp accuracy away from 0/1 so a degenerate bin cannot dominate.
    accuracy_in_bin = max(min(origin_accuracy_in_bin, 0.99), 0.01)
    prop_in_bin = in_bin.float().mean()
    if is_logits:
        probabilities = F.softmax(logits, dim=1)
        confidences_temp, _ = torch.max(probabilities, 1)
    else:
        confidences_temp, _ = torch.max(logits, 1)
    avg_confidence_in_bin = confidences_temp.mean()
    gap = torch.abs(accuracy_in_bin - avg_confidence_in_bin)
    samples = logits.shape[0]
    ece = (prop_in_bin * gap).item()
    return ece, samples, origin_accuracy_in_bin, avg_confidence_in_bin
def bins_temperature_scale_test3(logits, labels, bins_T, iters, bin_boundaries, many_samples, single_temp, best_iter, n_bins=15):
    """
    Apply the learned per-bin temperatures (``bins_T``) to test logits.

    For each of the ``best_iter + 1`` refinement iterations, every sample is
    assigned to its confidence bin (using ``bin_boundaries[i]``) and its
    logits are divided by that bin's temperature ``bins_T[bin, i]``; the
    confidences are then recomputed from the scaled logits before the next
    iteration.  Per-bin ECEs for the original logits, the per-bin-scaled
    logits and a single-temperature (``single_temp``) baseline are collected
    for reporting.

    Returns (scaled_logits, ece_per_bin, single_ece_per_bin,
    original_ece_per_bin, ece_list).

    NOTE(review): relies on module-level ``ECELoss``, ``bin_ece``, ``torch``
    and ``F``; runs on CUDA. ``iters``, ``many_samples``, ``n_bins`` and the
    locals ``logits_np``/``prev_bin`` are unused here.
    """
    ece_criterion = ECELoss(n_bins=25).cuda()
    softmaxes = F.softmax(logits, dim=1)
    confidences, predictions = torch.max(softmaxes, 1)
    accuracies = predictions.eq(labels)
    #confidences[confidences > 0.9995] = 0.9995
    logits_np = logits.cpu().detach().numpy()
    scaled_logits = logits.clone()
    ece_list = []
    ece_per_bin = []
    single_ece_per_bin = []
    original_ece_per_bin = []
    print(f'Number of iters: {best_iter + 1}')
    for i in range(best_iter + 1):
        bin = 0
        prev_bin = None
        print('\n')
        bin_lowers = bin_boundaries[i][:-1]
        bin_uppers = bin_boundaries[i][1:]
        for bin_lower, bin_upper in zip(bin_lowers, bin_uppers):
            # Boolean mask of samples whose confidence falls in (lower, upper].
            in_bin = confidences.gt(bin_lower.item()) * confidences.le(bin_upper.item())
            if any(in_bin):
                """
                # Smoothing
                bin_len = max(bin_upper - bin_lower, 1e-5)
                conf_location = (confidences[in_bin] - bin_lower) / bin_len
                high_conf = conf_location.gt(0.5)
                low_conf = conf_location.le(0.5)
                if bin > 0 and bin < n_bins - 1:
                    prev_avg = (bins_T[bin, i] + bins_T[bin - 1, i]) / 2
                    next_avg = (bins_T[bin + 1, i] + bins_T[bin, i]) / 2
                    temps_high = (2 * (conf_location[high_conf] - 0.5) * abs(bins_T[bin, i] - next_avg) + bins_T[bin, i]).unsqueeze(dim=-1)
                    temps_low = (2 * conf_location[low_conf] * abs(bins_T[bin, i] - prev_avg) + prev_avg).unsqueeze(dim=-1)
                elif bin == 0:
                    next_avg = (bins_T[bin + 1, i] + bins_T[bin, i]) / 2
                    temps_high = (2 * (conf_location[high_conf] - 0.5) * abs(bins_T[bin, i] - next_avg) + bins_T[bin, i]).unsqueeze(dim=-1)
                    temps_low = bins_T[bin, i]
                else:
                    prev_avg = abs(bins_T[bin, i] + bins_T[bin - 1, i]) / 2
                    temps_high = bins_T[bin, i]
                    temps_low = (2 * conf_location[low_conf] * abs(bins_T[bin, i] - prev_avg) + prev_avg).unsqueeze(dim=-1)
                #temps[temps == 0] = 1e-5
                #scaled_logits[in_bin][high_conf] = scaled_logits[in_bin][high_conf] / temps_high
                #scaled_logits[in_bin][low_conf] = scaled_logits[in_bin][low_conf] / temps_low
                """
                # ECE of this bin before any scaling (for reporting).
                original_ece, samples, accuracy_in_bin, origin_avg_confidence_in_bin = bin_ece(logits[in_bin], accuracies, in_bin)
                original_ece_per_bin.append(original_ece)
                # Divide the bin's logits by its learned temperature.
                scaled_logits[in_bin] = scaled_logits[in_bin] / bins_T[bin, i]
                ece, _, _, _ = bin_ece(scaled_logits[in_bin], accuracies, in_bin)
                ece_per_bin.append(ece)
                # Single-temperature baseline for the same bin.
                single_logits = logits[in_bin] / single_temp
                single_ece, _, _, _ = bin_ece(single_logits, accuracies, in_bin)
                single_ece_per_bin.append(single_ece)
                print('original average confidence in bin ', bin + 1, ' :', origin_avg_confidence_in_bin.item())
                print('ece in bin ', bin + 1, ' :', ece,
                      ', number of samples: ', samples)
                print('accuracy in bin ', bin + 1, ': ', accuracy_in_bin)
                bin += 1
        ece_list.append(ece_criterion(scaled_logits, labels).item())
        # Re-derive confidences from the scaled logits for the next iteration.
        softmaxes = F.softmax(scaled_logits, dim=1)
        confidences, _ = torch.max(softmaxes, 1)
    print(ece_list)
    return scaled_logits, ece_per_bin, single_ece_per_bin, original_ece_per_bin, ece_list
def set_temperature3(logits, labels, iters=1, cross_validate='ece',
                     init_temp=2.5, const_temp=False, log=True, num_bins=25):
    """
    Tune the tempearature of the model (using the validation set) with cross-validation on ECE or NLL.

    const_temp=True: grid-search a single scalar temperature
    (T = 0.1 .. 10.0, step 0.1) on ECE or NLL and return it.

    const_temp=False: first tune a single temperature, then learn one
    temperature per equal-mass confidence bin (bin-wise temperature
    scaling) over ``iters`` refinement iterations, rescaling the logits and
    re-deriving the confidences after each iteration.

    Returns ``temperature`` when ``const_temp`` is True, otherwise
    ``(bins_T, temperature, bin_boundaries, many_samples, best_iter)``.

    NOTE(review): depends on module-level ``ECELoss``, ``nn``, ``torch``,
    ``np``, ``F`` and the helpers ``temperature_scale2``,
    ``bins_temperature_scale2``, ``histedges_equalN``; runs on CUDA.
    """
    if const_temp:
        nll_criterion = nn.CrossEntropyLoss().cuda()
        ece_criterion = ECELoss().cuda()
        # Calculate NLL and ECE before temperature scaling
        before_temperature_nll = nll_criterion(logits, labels).item()
        before_temperature_ece = ece_criterion(logits, labels).item()
        if log:
            print('Before temperature - NLL: %.3f, ECE: %.3f' % (before_temperature_nll, before_temperature_ece))
        nll_val = 10 ** 7
        ece_val = 10 ** 7
        T_opt_nll = 1.0
        T_opt_ece = 1.0
        T = 0.1
        # Grid search over T, keeping the best value for each metric.
        for i in range(100):
            temperature = T
            after_temperature_nll = nll_criterion(temperature_scale2(logits, temperature), labels).item()
            after_temperature_ece = ece_criterion(temperature_scale2(logits, temperature), labels).item()
            if nll_val > after_temperature_nll:
                T_opt_nll = T
                nll_val = after_temperature_nll
            if ece_val > after_temperature_ece:
                T_opt_ece = T
                ece_val = after_temperature_ece
            T += 0.1
        if cross_validate == 'ece':
            temperature = T_opt_ece
        else:
            temperature = T_opt_nll
        # Calculate NLL and ECE after temperature scaling
        after_temperature_nll = nll_criterion(temperature_scale2(logits, temperature), labels).item()
        after_temperature_ece = ece_criterion(temperature_scale2(logits, temperature), labels).item()
        if log:
            print('Optimal temperature: %.3f' % temperature)
            print('After temperature - NLL: %.3f, ECE: %.3f' % (after_temperature_nll, after_temperature_ece))
    else:
        """
        Tune single tempearature for the model (using the validation set) with cross-validation on ECE
        """
        # Calculate ECE before temperature scaling
        ece_criterion = ECELoss(n_bins=num_bins).cuda()
        nll_criterion = nn.CrossEntropyLoss().cuda()
        before_temperature_ece = ece_criterion(logits, labels).item()
        if log:
            print('Before temperature - ECE: %.3f' % (before_temperature_ece))
        n_bins = num_bins
        if cross_validate != 'ece':
            n_bins = 50
        eps = 1e-5
        nll_val = 10 ** 7
        ece_val = 10 ** 7
        T_opt_nll = 1.0
        T_opt_ece = 1.0
        T = 0.1
        labels = labels.type(torch.LongTensor).cuda()
        temps_iters = torch.ones(iters).cuda()
        # --- Stage 1: single-temperature search, repeated ``iters`` times. ---
        # NOTE(review): T is NOT reset to 0.1 between iterations, so iteration
        # i scans temperatures (10*i, 10*(i+1)] — confirm this is intended.
        for i in range(iters):
            temp_logits = logits.clone()
            for t in range(100):
                temperature = T
                after_temperature_ece = ece_criterion(temperature_scale2(temp_logits, temperature), labels).item()
                after_temperature_nll = nll_criterion(temperature_scale2(temp_logits, temperature), labels).item()
                if ece_val > after_temperature_ece:
                    T_opt_ece = T
                    ece_val = after_temperature_ece
                if nll_val > after_temperature_nll:
                    T_opt_nll = T
                    nll_val = after_temperature_nll
                T += 0.1
            if cross_validate == 'ece':
                temps_iters[i] = T_opt_ece
            else:
                temps_iters[i] = T_opt_nll
            # NOTE(review): divides by T_opt_ece even when cross-validating on
            # NLL — possibly should use the selected metric's optimum.
            temp_logits = temp_logits / T_opt_ece
            after_temperature_ece = ece_criterion(temperature_scale2(temp_logits, T_opt_ece), labels).item()
            print('Temperature for #{} iteration for single TS: {}'.format(i + 1, after_temperature_ece))
        if cross_validate == 'ece':
            temperature = T_opt_ece
        else:
            temperature = T_opt_nll
        init_temp = temperature
        # Calculate ECE after temperature scaling
        after_temperature_ece = ece_criterion(temperature_scale2(logits, temperature), labels).item()
        if log:
            print('Optimal temperature: %.3f' % init_temp)
            print('After temperature - ECE: %.3f' % (after_temperature_ece))
        # --- Stage 2: bin-wise temperature scaling. ---
        init_temp = 1
        #top_temp = T_opt_ece
        bins_T = init_temp*torch.ones((n_bins, iters)).cuda()
        ece_list = []
        ece_list.append(ece_criterion(temperature_scale2(logits, temperature), labels).item())
        softmaxes = F.softmax(logits, dim=1)
        confidences, predictions = torch.max(softmaxes, 1)
        #confidences[confidences > 0.9995] = 0.9995
        accuracies = predictions.eq(labels)
        bin_boundaries = torch.linspace(0, 1, n_bins + 1).unsqueeze(0).repeat((iters, 1)).numpy()
        #steps_limit = 0.2
        #temp_steps = torch.linspace(-steps_limit, steps_limit, int((2 * steps_limit) / 0.1 + 1)).cuda()
        many_samples = None
        original_bins = torch.zeros(confidences.shape)
        ece_ada_list = []
        count_high_acc = 0
        is_acc = False
        # Equal-mass binning of the (unscaled) confidences.
        n, bin_boundaries[0] = np.histogram(confidences.cpu().detach(), histedges_equalN(confidences.cpu().detach(), n_bins=n_bins))
        bin_lowers = bin_boundaries[0][:-1]
        bin_uppers = bin_boundaries[0][1:]
        # Count bins whose accuracy is near-perfect to detect a highly
        # accurate model (accuracy clamping is skipped in that regime).
        for bin_lower, bin_upper in zip(bin_lowers, bin_uppers):
            in_bin = confidences.gt(bin_lower.item()) * confidences.le(bin_upper.item())
            if any(in_bin):
                accuracies_temp = accuracies[in_bin]
                origin_accuracy_in_bin = accuracies_temp.float().mean().item()
                if origin_accuracy_in_bin > 0.99:
                    count_high_acc += 1
        if count_high_acc > int(n_bins/2): # model is highly accurated
            is_acc = True
        confidences[confidences > 0.9995] = 0.9995
        for i in range(iters):
            if cross_validate == 'ece':
                # One temperature per SAMPLE; samples in the same bin share a value.
                T_opt_bece = init_temp*torch.ones(logits.shape[0]).cuda()
                T_bece = init_temp*torch.ones(logits.shape[0]).cuda()
                bece_temperature = T_bece
            else:
                T_opt_nll = init_temp*torch.ones(logits.shape[0]).cuda()
                T_nll = init_temp*torch.ones(logits.shape[0]).cuda()
                nll_temperature = T_nll
            ece_in_iter = 0
            print('iter num ', i+1)
            bin = 0
            few_examples = dict()
            if i == 0:
                n, bin_boundaries[i] = np.histogram(confidences.cpu().detach(), histedges_equalN(confidences.cpu().detach(), n_bins=n_bins))
            else:
                bin_boundaries[i] = bin_boundaries[i - 1]
            if cross_validate != 'ece':
                bin_boundaries[i][bin_boundaries[i] > 0.999] = 1
            #bin_boundaries = torch.linspace(0, 1, n_bins + 1)
            #bin_boundaries[i], many_samples = equal_bins(confidences.cpu().detach(), n_bins=n_bins)
            bin_lowers = bin_boundaries[i][:-1]
            bin_uppers = bin_boundaries[i][1:]
            for bin_lower, bin_upper in zip(bin_lowers, bin_uppers):
                in_bin = confidences.gt(bin_lower.item()) * confidences.le(bin_upper.item())
                prop_in_bin = in_bin.float().mean()
                # Bins with too few samples are deferred: their temperature is
                # later interpolated from neighbouring bins.
                # NOTE(review): in NLL mode (cross_validate != 'ece') an empty
                # bin skips the `if any(in_bin)` body without advancing `bin`,
                # which would misalign bins_T — confirm intended.
                if confidences[in_bin].shape[0] < 20 and cross_validate == 'ece':
                    samples = T_bece[in_bin].shape[0]
                    print('number of samples in bin {0}: {1}'.format(bin + 1, samples))
                    few_examples[bin] = samples
                    bin += 1
                    continue
                if any(in_bin):
                    #init_temp_value = T_bece[in_bin][0].item()
                    T = 0.1
                    accuracies_temp = accuracies[in_bin]
                    origin_accuracy_in_bin = accuracies_temp.float().mean().item()
                    origin_avg_confidence_in_bin = confidences[in_bin].mean()
                    # Clamp accuracy away from 0/1 unless the model is highly accurate.
                    accuracy_in_bin = min(origin_accuracy_in_bin, 0.99)
                    accuracy_in_bin = max(accuracy_in_bin, 0.01)
                    if is_acc and cross_validate == 'ece':
                        accuracy_in_bin = origin_accuracy_in_bin
                    if cross_validate == 'ece':
                        bece_val = torch.abs(accuracy_in_bin - origin_avg_confidence_in_bin)
                    else:
                        nll_val = nll_criterion(logits[in_bin] / bins_T[bin, i], labels[in_bin]).item()
                    # Grid-search this bin's temperature (T = 0.1 .. 10.0).
                    for t in range(100):
                        #for step in temp_steps:
                        #T_bece[in_bin] = init_temp_value + step
                        logits_temp = logits.clone()
                        if cross_validate == 'ece':
                            T_bece[in_bin] = T
                            bece_temperature = T_bece
                            logits_temp[in_bin] = logits[in_bin] / torch.unsqueeze(T_bece[in_bin], -1)
                            softmaxes_temp = F.softmax(logits_temp, dim=1)
                            confidences_temp, _ = torch.max(softmaxes_temp[in_bin], 1)
                            avg_confidence_in_bin = confidences_temp.mean()
                            after_temperature = torch.abs(accuracy_in_bin - avg_confidence_in_bin)
                            if bece_val > after_temperature + eps:
                                #T_opt_bece[in_bin] = init_temp_value + step
                                T_opt_bece[in_bin] = T
                                bece_val = after_temperature
                        else:
                            T_nll[in_bin] = T
                            nll_temperature = T_nll
                            after_temperature_nll = nll_criterion(logits[in_bin] / torch.unsqueeze(T_nll[in_bin], -1), labels[in_bin]).item()
                            if nll_val > after_temperature_nll:
                                T_opt_nll[in_bin] = T
                                nll_val = after_temperature_nll
                        T += 0.1
                    original_bins[in_bin] = bin
                    if cross_validate == 'ece':
                        T_bece[in_bin] = T_opt_bece[in_bin]
                        bins_T[bin, i] = T_opt_bece[in_bin][0].item()
                        samples = T_bece[in_bin].shape[0]
                        ece_in_iter += prop_in_bin * bece_val
                    else:
                        T_nll[in_bin] = T_opt_nll[in_bin]
                        bins_T[bin, i] = T_opt_nll[in_bin][0].item()
                        samples = T_nll[in_bin].shape[0]
                        ece_in_iter += prop_in_bin * nll_val
                    print('original average confidence in bin ', bin + 1, ' :', origin_avg_confidence_in_bin.item())
                    if cross_validate == 'ece':
                        print('ece in bin ', bin+1, ' :', (prop_in_bin * bece_val).item(), ', number of samples: ', samples)
                    else:
                        print('ece in bin ', bin+1, ' :', (prop_in_bin * nll_val).item(), ', number of samples: ', samples)
                    print('accuracy in bin ', bin+1, ': ', origin_accuracy_in_bin)
                    bin += 1
            print(bins_T[:, i])
            # Fill deferred (few-sample) bins from their nearest tuned neighbours.
            if cross_validate == 'ece':
                for bin in few_examples:
                    #bins_T[bin, i] = temperature
                    if bin > 0 and bin < n_bins - 1:
                        lower_bin = bin - 1
                        upper_bin = bin + 1
                        while lower_bin in few_examples and lower_bin - 1 >= 0:
                            lower_bin -= 1
                        while upper_bin in few_examples and upper_bin + 1 <= n_bins - 1:
                            upper_bin += 1
                        if upper_bin == n_bins - 1:
                            bins_T[bin, i] = bins_T[lower_bin, i]
                        else:
                            avg_temp = (bins_T[lower_bin, i] + bins_T[upper_bin, i]) / 2 # Mean temperature of neighbors
                            bins_T[bin, i] = avg_temp
                    elif bin == 0:
                        upper_bin = bin + 1
                        while upper_bin in few_examples and upper_bin + 1 <= n_bins - 1:
                            upper_bin += 1
                        bins_T[bin, i] = bins_T[upper_bin, i]
                    else:
                        lower_bin = bin - 1
                        while lower_bin in few_examples and lower_bin - 1 >= 0:
                            lower_bin -= 1
                        bins_T[bin, i] = bins_T[lower_bin, i]
            if cross_validate == 'ece':
                bece_temperature = T_opt_bece
                current_ece = ece_criterion(bins_temperature_scale2(logits, bece_temperature), labels).item()
            else:
                nll_temperature = T_opt_nll
                current_ece = ece_criterion(bins_temperature_scale2(logits, nll_temperature), labels).item()
            print('ece in iter ', i+1, ' :', current_ece)
            # Track the iteration with the lowest overall ECE.
            if i > 0 and current_ece < ece_list[best_iter]:
                best_iter = i
            if i == 0:
                best_iter = 0
            # Stop early when the ECE improvement stalls.
            if abs(ece_list[-1] - current_ece) > eps:
                ece_list.append(current_ece)
            else:
                iters = i + 1
                break
            ece_ada_list.append(ece_in_iter.item())
            # Apply the learned per-sample temperatures and re-derive
            # confidences for the next refinement iteration.
            if cross_validate == 'ece':
                logits = logits / torch.unsqueeze(bece_temperature, -1)
            else:
                logits = logits / torch.unsqueeze(nll_temperature, -1)
            softmaxes = F.softmax(logits, dim=1)
            confidences, _ = torch.max(softmaxes, 1)
        # Report how many samples changed bins due to scaling.
        moved_bins = torch.zeros(confidences.shape)
        bin = 0
        for bin_lower, bin_upper in zip(bin_lowers, bin_uppers):
            in_bin = confidences.gt(bin_lower.item()) * confidences.le(bin_upper.item())
            moved_bins[in_bin] = bin
            bin += 1
        bins_moved = torch.eq(original_bins, moved_bins)
        moved_precentage = bins_moved.float().mean()
        print('Precentage of moved bins after scaling: ', 100 - (moved_precentage * 100).item())
    if const_temp:
        return temperature
    else:
        return bins_T, temperature, bin_boundaries, many_samples, best_iter
def check_movements(logits, const):
    """Return confidence-rank orderings before and after scaling by *const*.

    Comparing the two argsorts shows whether dividing the logits by a
    constant temperature re-orders the samples by softmax confidence.
    """
    base_confidences, _ = torch.max(F.softmax(logits, dim=1), 1)
    before_indices = torch.argsort(base_confidences)
    scaled_confidences, _ = torch.max(F.softmax(logits / const, dim=1), 1)
    after_indices = torch.argsort(scaled_confidences)
    return before_indices, after_indices
def bins_temperature_scale_test4(logits, labels, bins_T, iters, bin_boundaries, single_temp, best_iter, n_bins=15):
    """
    Apply the learned per-bin WEIGHTS (``bins_T``) to test softmaxes.

    Unlike the temperature variant, each bin's probabilities are blended
    with the uniform distribution: ``w * p + (1 - w) / n_classes`` with
    ``w = bins_T[bin, i]``; after each iteration the rows are renormalised
    and the confidences recomputed.  Per-bin ECEs for the original logits,
    the weight-scaled probabilities and a single-temperature
    (``single_temp``) baseline are collected for reporting.

    Returns (softmaxes, ece_per_bin, single_ece_per_bin,
    original_ece_per_bin, ece_list).

    NOTE(review): relies on module-level ``ECELoss`` (with an
    ``is_logits`` flag), ``bin_ece``, ``torch`` and ``F``; runs on CUDA.
    ``iters``, ``n_bins`` and the local ``logits_np``/``prev_bin`` are
    unused here.
    """
    ece_criterion = ECELoss(n_bins=25).cuda()
    softmaxes = F.softmax(logits, dim=1)
    confidences, predictions = torch.max(softmaxes, 1)
    accuracies = predictions.eq(labels)
    # confidences[confidences > 0.99999] = 0.99999
    logits_np = logits.cpu().detach().numpy()
    n_classes = logits.shape[1]
    ece_list = []
    ece_per_bin = []
    single_ece_per_bin = []
    original_ece_per_bin = []
    print(f'Number of iters: {best_iter + 1}')
    for i in range(best_iter + 1):
        bin = 0
        prev_bin = None
        print('\n')
        bin_lowers = bin_boundaries[i][:-1]
        bin_uppers = bin_boundaries[i][1:]
        for bin_lower, bin_upper in zip(bin_lowers, bin_uppers):
            # Boolean mask of samples whose confidence falls in (lower, upper].
            in_bin = confidences.gt(bin_lower.item()) * confidences.le(bin_upper.item())
            if any(in_bin):
                # ECE of this bin before any scaling (for reporting).
                original_ece, samples, accuracy_in_bin, origin_avg_confidence_in_bin = bin_ece(logits[in_bin], accuracies, in_bin)
                original_ece_per_bin.append(original_ece)
                # Blend the bin's probabilities with the uniform distribution.
                weight = bins_T[bin, i]
                softmaxes[in_bin] = weight * softmaxes[in_bin] + (1 - weight) * 1 / n_classes
                ece, _, _, _ = bin_ece(softmaxes[in_bin], accuracies, in_bin, is_logits=False)
                ece_per_bin.append(ece)
                # Single-temperature baseline for the same bin.
                single_logits = logits[in_bin] / single_temp
                single_ece, _, _, _ = bin_ece(single_logits, accuracies, in_bin)
                single_ece_per_bin.append(single_ece)
                print('original average confidence in bin ', bin + 1, ' :', origin_avg_confidence_in_bin.item())
                print('ece in bin ', bin + 1, ' :', ece,
                      ', number of samples: ', samples)
                print('accuracy in bin ', bin + 1, ': ', accuracy_in_bin)
                bin += 1
        # Renormalise rows (the blend may not sum to 1 after in-place edits).
        softmaxes /= torch.sum(softmaxes, 1, keepdim=True)
        ece_list.append(ece_criterion(softmaxes, labels, is_logits=False).item())
        confidences, _ = torch.max(softmaxes, 1)
    print(ece_list)
    return softmaxes, ece_per_bin, single_ece_per_bin, original_ece_per_bin, ece_list
def set_temperature4(logits, labels, iters=1, cross_validate='ece',
                     init_temp=2.5, acc_check=False, const_temp=False, log=True, num_bins=25, top_temp=10):
    """
    Tune the tempearature of the model (using the validation set) with cross-validation on ECE or NLL.

    const_temp=True: grid-search a single scalar temperature
    (T = 0.1 .. 10.0, step 0.1) on ECE or NLL and return it.

    const_temp=False: first tune a single temperature, then compute one
    WEIGHT per equal-mass confidence bin in closed form,
    ``w = (acc - 1/C) / (conf - 1/C)``, and blend each bin's softmax with
    the uniform distribution (``w * p + (1 - w) / C``) over ``iters``
    refinement iterations.

    Returns ``temperature`` when ``const_temp`` is True, otherwise
    ``(bins_T, temperature, bin_boundaries, best_iter, conf_acc_diff)``.

    NOTE(review): depends on module-level ``ECELoss`` (with ``is_logits``),
    ``nn``, ``torch``, ``np``, ``F``, ``temperature_scale2`` and
    ``histedges_equalN``; runs on CUDA. ``acc_check`` and ``top_temp`` are
    unused in this variant.
    """
    if const_temp:
        nll_criterion = nn.CrossEntropyLoss().cuda()
        ece_criterion = ECELoss().cuda()
        # Calculate NLL and ECE before temperature scaling
        before_temperature_nll = nll_criterion(logits, labels).item()
        before_temperature_ece = ece_criterion(logits, labels).item()
        if log:
            print('Before temperature - NLL: %.3f, ECE: %.3f' % (before_temperature_nll, before_temperature_ece))
        nll_val = 10 ** 7
        ece_val = 10 ** 7
        T_opt_nll = 1.0
        T_opt_ece = 1.0
        T = 0.1
        # Grid search over T, keeping the best value for each metric.
        for i in range(100):
            temperature = T
            after_temperature_nll = nll_criterion(temperature_scale2(logits, temperature), labels).item()
            after_temperature_ece = ece_criterion(temperature_scale2(logits, temperature), labels).item()
            if nll_val > after_temperature_nll:
                T_opt_nll = T
                nll_val = after_temperature_nll
            if ece_val > after_temperature_ece:
                T_opt_ece = T
                ece_val = after_temperature_ece
            T += 0.1
        if cross_validate == 'ece':
            temperature = T_opt_ece
        else:
            temperature = T_opt_nll
        # Calculate NLL and ECE after temperature scaling
        after_temperature_nll = nll_criterion(temperature_scale2(logits, temperature), labels).item()
        after_temperature_ece = ece_criterion(temperature_scale2(logits, temperature), labels).item()
        if log:
            print('Optimal temperature: %.3f' % temperature)
            print('After temperature - NLL: %.3f, ECE: %.3f' % (after_temperature_nll, after_temperature_ece))
    else:
        """
        Tune single tempearature for the model (using the validation set) with cross-validation on ECE
        """
        # Calculate ECE before temperature scaling
        ece_criterion = ECELoss(n_bins=num_bins).cuda()
        nll_criterion = nn.NLLLoss().cuda()
        before_temperature_ece = ece_criterion(logits, labels).item()
        if log:
            print('Before temperature - ECE: %.3f' % (before_temperature_ece))
        n_bins = num_bins
        eps = 1e-5
        nll_val = 10 ** 7
        ece_val = 10 ** 7
        T_opt_nll = 1.0
        T_opt_ece = 1.0
        T = 0.1
        labels = labels.type(torch.LongTensor).cuda()
        temps_iters = torch.ones(iters).cuda()
        # --- Stage 1: single-temperature search, repeated ``iters`` times. ---
        # NOTE(review): T is NOT reset to 0.1 between iterations — iteration i
        # scans temperatures (10*i, 10*(i+1)]; confirm this is intended.
        # NOTE(review): nn.NLLLoss is applied to raw (log-)scores here, not to
        # log-probabilities — confirm the intended criterion.
        for i in range(iters):
            temp_logits = logits.clone()
            for t in range(100):
                temperature = T
                after_temperature_ece = ece_criterion(temperature_scale2(temp_logits, temperature), labels).item()
                after_temperature_nll = nll_criterion(temperature_scale2(temp_logits, temperature), labels).item()
                if ece_val > after_temperature_ece:
                    T_opt_ece = T
                    ece_val = after_temperature_ece
                if nll_val > after_temperature_nll:
                    T_opt_nll = T
                    nll_val = after_temperature_nll
                T += 0.1
            temps_iters[i] = T_opt_ece
            temp_logits = temp_logits / T_opt_ece
            after_temperature_ece = ece_criterion(temperature_scale2(temp_logits, T_opt_ece), labels).item()
            print('Temperature for #{} iteration for single TS: {}'.format(i + 1, after_temperature_ece))
        temperature = T_opt_ece
        init_temp = temperature
        # Calculate ECE after temperature scaling
        after_temperature_ece = ece_criterion(temperature_scale2(logits, temperature), labels).item()
        if log:
            print('Optimal temperature: %.3f' % init_temp)
            print('After temperature - ECE: %.3f' % (after_temperature_ece))
        # --- Stage 2: bin-wise weight scaling. ---
        init_temp = 0
        bins_T = init_temp*torch.ones((n_bins, iters)).cuda()
        ece_list = []
        ece_list.append(ece_criterion(temperature_scale2(logits, temperature), labels).item())
        softmaxes = F.softmax(logits, dim=1)
        confidences, predictions = torch.max(softmaxes, 1)
        # confidences[confidences > 0.99999] = 0.99999
        accuracies = predictions.eq(labels)
        bin_boundaries = torch.linspace(0, 1, n_bins + 1).unsqueeze(0).repeat((iters, 1)).numpy()
        original_bins = torch.zeros(confidences.shape)
        ece_ada_list = []
        count_high_acc = 0
        is_acc = False
        # Equal-mass binning of the (unscaled) confidences.
        n, bin_boundaries[0] = np.histogram(confidences.cpu().detach(), histedges_equalN(confidences.cpu().detach(), n_bins=n_bins))
        bin_lowers = bin_boundaries[0][:-1]
        bin_uppers = bin_boundaries[0][1:]
        # Count bins whose accuracy is near-perfect to detect a highly
        # accurate model (accuracy clamping is skipped in that regime).
        for bin_lower, bin_upper in zip(bin_lowers, bin_uppers):
            in_bin = confidences.gt(bin_lower.item()) * confidences.le(bin_upper.item())
            if any(in_bin):
                accuracies_temp = accuracies[in_bin]
                origin_accuracy_in_bin = accuracies_temp.float().mean().item()
                if origin_accuracy_in_bin > 0.99:
                    count_high_acc += 1
        if count_high_acc > int(n_bins/2): # model is highly accurated
            is_acc = True
        confidences[confidences > 0.9999] = 0.9999
        n_classes = logits.shape[1]
        conf_acc_diff = []
        for i in range(iters):
            # One weight per SAMPLE; samples in the same bin share a value.
            T_opt_bece = init_temp*torch.ones(logits.shape[0]).cuda()
            T_bece = init_temp*torch.ones(logits.shape[0]).cuda()
            bece_temperature = T_bece
            ece_in_iter = 0
            print('iter num ', i+1)
            bin = 0
            few_examples = dict()
            if i == 0:
                n, bin_boundaries[i] = np.histogram(confidences.cpu().detach(), histedges_equalN(confidences.cpu().detach(), n_bins=n_bins))
            else:
                bin_boundaries[i] = bin_boundaries[i - 1]
            bin_lowers = bin_boundaries[i][:-1]
            bin_uppers = bin_boundaries[i][1:]
            for bin_lower, bin_upper in zip(bin_lowers, bin_uppers):
                in_bin = confidences.gt(bin_lower.item()) * confidences.le(bin_upper.item())
                prop_in_bin = in_bin.float().mean()
                # Bins with too few samples are deferred: their weight is
                # later interpolated from neighbouring bins.
                if confidences[in_bin].shape[0] < 20:
                    samples = T_bece[in_bin].shape[0]
                    print('number of samples in bin {0}: {1}'.format(bin + 1, samples))
                    few_examples[bin] = samples
                    # Carry the previous bin's confidence-accuracy gap forward.
                    if bin == 0:
                        conf_acc_diff.append(0)
                    else:
                        conf_acc_diff.append(conf_acc_diff[-1])
                    bin += 1
                    continue
                if any(in_bin):
                    T = 0.0
                    accuracies_temp = accuracies[in_bin]
                    origin_accuracy_in_bin = accuracies_temp.float().mean().item()
                    origin_avg_confidence_in_bin = confidences[in_bin].mean()
                    # Clamp accuracy away from 0/1 unless the model is highly accurate.
                    accuracy_in_bin = min(origin_accuracy_in_bin, 0.99)
                    accuracy_in_bin = max(accuracy_in_bin, 0.01)
                    conf_acc_diff.append(origin_avg_confidence_in_bin - origin_accuracy_in_bin)
                    if is_acc:
                        accuracy_in_bin = origin_accuracy_in_bin
                    """
                    bece_val = torch.abs(accuracy_in_bin - origin_avg_confidence_in_bin)
                    # bece_val = ece_criterion(logits, labels)
                    for t in range(100):
                        temp_softmaxes = softmaxes.clone()
                        T_bece[in_bin] = T
                        bece_temperature = T_bece
                        temp_softmaxes[in_bin] = T * temp_softmaxes[in_bin] + (1 - T) * 1 / n_classes
                        # temp_softmaxes[in_bin] = temp_softmaxes[in_bin] - T * dists[in_bin]
                        temp_softmaxes /= torch.sum(temp_softmaxes, 1, keepdim=True)
                        confidences_temp, _ = torch.max(temp_softmaxes[in_bin], 1)
                        avg_confidence_in_bin = confidences_temp.mean()
                        after_temperature = torch.abs(accuracy_in_bin - avg_confidence_in_bin)
                        # after_temperature = ece_criterion(temp_softmaxes, labels, is_logits=False)
                        if bece_val > after_temperature + eps:
                            T_opt_bece[in_bin] = T
                            bece_val = after_temperature
                        T += 0.01
                    """
                    # Closed-form weight: makes the blended confidence match
                    # the bin's accuracy, w = (acc - 1/C) / (conf - 1/C).
                    weight_i = (origin_accuracy_in_bin - 1/n_classes) / (origin_avg_confidence_in_bin - 1/n_classes)
                    # weight_i = max(0, min(1, weight_i.item()))
                    T_opt_bece[in_bin] = weight_i
                    # Compute bin's updated ECE
                    temp_softmaxes = softmaxes.clone()
                    temp_softmaxes[in_bin] = weight_i * temp_softmaxes[in_bin] + (1 - weight_i) * 1 / n_classes
                    temp_softmaxes /= torch.sum(temp_softmaxes, 1, keepdim=True)
                    confidences_temp, _ = torch.max(temp_softmaxes[in_bin], 1)
                    avg_confidence_in_bin = confidences_temp.mean()
                    bece_val = torch.abs(origin_accuracy_in_bin - avg_confidence_in_bin)
                    original_bins[in_bin] = bin
                    T_bece[in_bin] = T_opt_bece[in_bin]
                    bins_T[bin, i] = T_opt_bece[in_bin][0].item()
                    samples = T_bece[in_bin].shape[0]
                    ece_in_iter += prop_in_bin * bece_val
                    print('original average confidence in bin ', bin + 1, ' :', origin_avg_confidence_in_bin.item())
                    print('ece in bin ', bin+1, ' :', (prop_in_bin * bece_val).item(), ', number of samples: ', samples)
                    print('accuracy in bin ', bin+1, ': ', origin_accuracy_in_bin)
                    bin += 1
            print(bins_T[:, i])
            # Fill deferred (few-sample) bins from their nearest tuned neighbours.
            for bin in few_examples:
                #bins_T[bin, i] = temperature
                if bin > 0 and bin < n_bins - 1:
                    lower_bin = bin - 1
                    upper_bin = bin + 1
                    while lower_bin in few_examples and lower_bin - 1 >= 0:
                        lower_bin -= 1
                    while upper_bin in few_examples and upper_bin + 1 <= n_bins - 1:
                        upper_bin += 1
                    if upper_bin == n_bins - 1:
                        bins_T[bin, i] = bins_T[lower_bin, i]
                    else:
                        avg_temp = (bins_T[lower_bin, i] + bins_T[upper_bin, i]) / 2 # Mean temperature of neighbors
                        bins_T[bin, i] = avg_temp
                elif bin == 0:
                    upper_bin = bin + 1
                    while upper_bin in few_examples and upper_bin + 1 <= n_bins - 1:
                        upper_bin += 1
                    bins_T[bin, i] = bins_T[upper_bin, i]
                else:
                    lower_bin = bin - 1
                    while lower_bin in few_examples and lower_bin - 1 >= 0:
                        lower_bin -= 1
                    bins_T[bin, i] = bins_T[lower_bin, i]
            bece_temperature = T_opt_bece
            ece_ada_list.append(ece_in_iter.item())
            # Apply the per-sample weights: blend with the uniform distribution.
            # NOTE(review): rows are not renormalised here (unlike the test-time
            # routine bins_temperature_scale_test4) — confirm intended.
            weight = bece_temperature.unsqueeze(-1).repeat(1, logits.shape[1])
            softmaxes = weight * softmaxes + (1 - weight) * 1 / n_classes
            # softmaxes = softmaxes - bece_temperature.unsqueeze(-1).repeat(1, dists.shape[1]) * dists
            confidences, _ = torch.max(softmaxes, 1)
            current_ece = ece_criterion(softmaxes, labels, is_logits=False).item()
            print('ece in iter ', i+1, ' :', current_ece)
            # Track the iteration with the lowest overall ECE.
            if i > 0 and current_ece < ece_list[best_iter]:
                best_iter = i
            if i == 0:
                best_iter = 0
            # Stop early when the ECE improvement stalls.
            if abs(ece_list[-1] - current_ece) > eps:
                ece_list.append(current_ece)
            else:
                iters = i + 1
                break
        # Report how many samples changed bins due to scaling.
        moved_bins = torch.zeros(confidences.shape)
        bin = 0
        for bin_lower, bin_upper in zip(bin_lowers, bin_uppers):
            in_bin = confidences.gt(bin_lower.item()) * confidences.le(bin_upper.item())
            moved_bins[in_bin] = bin
            bin += 1
        bins_moved = torch.eq(original_bins, moved_bins)
        moved_precentage = bins_moved.float().mean()
        print('Precentage of moved bins after scaling: ', 100 - (moved_precentage * 100).item())
    if const_temp:
        return temperature
    else:
        return bins_T, temperature, bin_boundaries, best_iter, conf_acc_diff
def bins_temperature_scale_test5(logits, weight):
    """
    Perform weight scaling on logits

    Blends the softmax distribution of ``logits`` with the uniform
    distribution over classes: ``weight`` = 1 leaves the softmax
    unchanged, ``weight`` = 0 yields the uniform distribution.
    """
    probs = F.softmax(logits, 1)
    num_classes = logits.shape[1]
    uniform_prob = 1 / num_classes
    # Convex-style combination of model confidence and the uniform prior
    blended = weight * probs + (1 - weight) * uniform_prob
    return blended
def set_temperature5(logits, labels, log=True):
    """
    Tune the temperature of the model (using the validation set) with cross-validation on ECE without bins

    Fits a single scalar mixing weight in closed form (least squares over
    equal-mass confidence bins) so that the blend of the softmax
    distribution with the uniform distribution best matches per-bin
    accuracy, then reports ECE before and after the scaling.

    Args:
        logits: validation-set logits, shape (n_samples, n_classes) — TODO confirm
        labels: ground-truth class indices, shape (n_samples,)
        log: if True, print ECE before and after weight scaling

    Returns:
        The optimal scalar mixing weight (torch scalar).
    """
    ece_criterion = ECELoss().cuda()
    # Calculate ECE before temperature scaling
    before_weight_ece = ece_criterion(logits, labels).item()
    if log:
        print('Before weight scaling - ECE: %.3f' % before_weight_ece)
    softmaxes = F.softmax(logits, 1)
    confidences, predictions = torch.max(softmaxes, 1)
    accuracies = predictions.eq(labels)
    n_classes = logits.shape[1]
    n_bins = 13
    # Equal-mass bin edges over the confidence range (histedges_equalN is
    # a project helper); only the edges are used, not the counts.
    _, bin_boundaries = np.histogram(confidences.cpu().detach(), histedges_equalN(confidences.cpu().detach(), n_bins=n_bins))
    bin_lowers = bin_boundaries[:-1]
    bin_uppers = bin_boundaries[1:]
    # Closed-form least-squares fit of weight w minimizing, over the
    # non-empty bins b:  sum_b ((conf_b - 1/K) * w - (acc_b - 1/K))^2
    numerator = []
    denominator = []
    for bin_lower, bin_upper in zip(bin_lowers, bin_uppers):
        in_bin = confidences.gt(bin_lower.item()) * confidences.le(bin_upper.item())
        if any(in_bin):
            accuracy_in_bin = accuracies[in_bin].float().mean().item()
            confidence_in_bin = confidences[in_bin].mean()
            numerator.append((confidence_in_bin - 1 / n_classes) * (accuracy_in_bin - 1 / n_classes))
            denominator.append((confidence_in_bin - 1 / n_classes) * (confidence_in_bin - 1 / n_classes))
    numerator = torch.Tensor(numerator)
    denominator = torch.Tensor(denominator)
    weight = torch.sum(numerator) / torch.sum(denominator)
    # Blend the softmax with the uniform distribution using the fitted weight
    softmaxes = weight * softmaxes + (1 - weight) * 1 / n_classes
    # Calculate ECE after temperature scaling
    after_weight_ece = ece_criterion(softmaxes, labels, is_logits=False).item()
    if log:
        print('Optimal weight: %.3f' % weight)
        print('After weight scaling - ECE: %.3f' % after_weight_ece)
    return weight
| 46.244752
| 143
| 0.546836
| 10,520
| 90,316
| 4.411977
| 0.027662
| 0.034688
| 0.038889
| 0.024648
| 0.897531
| 0.867691
| 0.840845
| 0.81999
| 0.799522
| 0.779765
| 0
| 0.019694
| 0.360191
| 90,316
| 1,952
| 144
| 46.268443
| 0.783525
| 0.075114
| 0
| 0.834607
| 0
| 0
| 0.032283
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.020848
| false
| 0
| 0.006254
| 0
| 0.054204
| 0.066713
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4c1947c64c0c2be625984290e3b3258901ff06f7
| 34,129
|
py
|
Python
|
fonts/vga2_bold_16x16.py
|
eldan-dex/ESP32-corona-tracker
|
0fee95c191cb6a5c18dbd2b97d37224f44a95d07
|
[
"MIT"
] | 1
|
2021-01-25T18:50:05.000Z
|
2021-01-25T18:50:05.000Z
|
fonts/vga2_bold_16x16.py
|
eldan-dex/ESP32-corona-tracker
|
0fee95c191cb6a5c18dbd2b97d37224f44a95d07
|
[
"MIT"
] | null | null | null |
fonts/vga2_bold_16x16.py
|
eldan-dex/ESP32-corona-tracker
|
0fee95c191cb6a5c18dbd2b97d37224f44a95d07
|
[
"MIT"
] | null | null | null |
# Bitmap font metadata: each glyph row below is 32 bytes = 16 rows of
# 16 one-bit pixels (16x16 monochrome glyphs).
WIDTH = 16  # glyph width in pixels
HEIGHT = 16  # glyph height in pixels
# NOTE(review): presumably the inclusive character-code range covered by
# _FONT (0..255, i.e. the full 8-bit VGA code page) — confirm against the
# renderer that consumes this module.
FIRST = 0
LAST = 255
_FONT = \
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x3f\xfc\x60\x06\xc0\x03\xcc\x33\xc0\x03\xc0\x03\xcf\xf3\xc3\xc3\xc0\x03\x60\x06\x3f\xfc\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x3f\xfc\x7f\xfe\xff\xff\xf3\xcf\xff\xff\xff\xff\xf0\x0f\xfc\x3f\xff\xff\x7f\xfe\x3f\xfc\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x1e\x78\x3f\xfc\x7f\xfe\x7f\xfe\x7f\xfe\x3f\xfc\x1f\xf8\x07\xe0\x01\x80\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x01\x80\x03\xc0\x07\xe0\x0f\xf0\x1f\xf8\x0f\xf0\x07\xe0\x03\xc0\x01\x80\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x03\xc0\x07\xe0\x07\xe0\x3c\x3c\x7c\x3e\x7c\x3e\x3c\x3c\x03\xc0\x03\xc0\x03\xc0\x0f\xf0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x03\xc0\x0f\xf0\x3f\xfc\xff\xff\xff\xff\xff\xff\x3f\xfc\x03\xc0\x03\xc0\x03\xc0\x0f\xf0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\xc0\x0f\xf0\x0f\xf0\x03\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfc\x3f\xf0\x0f\xf0\x0f\xfc\x3f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0f\xf0\x3c\x3c\x30\x0c\x30\x0c\x3c\x3c\x0f\xf0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf0\x0f\xc3\xc3\xcf\xf3\xcf\xf3\xc3\xc3\xf0\x0f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\x00\x00\x00\x00\x00\x00\x00\x7f\x00\x1f\x00\x3f\x00\x7b\x07\xf0\x1e\x78\x3c\x3c\x3c\x3c\x1e\x78\x07\xe0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x07\xe0\x1e\x78\x3c\x3c\x3c\x3c\x1e\x78\x07\xe0\x03\xc0\x03\xc0\x3f\xfc\x03\xc0\x03\xc0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x0f\xff\x0f\x0f\x0f\xff\x0f\x00\x0f\x00\x0f\x00\x0f\x00\x0f\x00\x3f\x00\x7f\x00\x3e\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x1f\xfe\x1e\x1e\x1f\xfe\x1e\x1e\x1e\x1e\x1e\x1e\x1e\x1e\x1e\x1e\x1e\x3e\x3e\x7e\x7e\x3c\x3c\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x03\xc0\x03\xc0\xf3\xcf\x0f\xf0\xfc\x3f\x0f\xf0\xf3\xcf\x03\xc0\x03\xc0\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x30\x00\x3c\x00\x3f\x00\x3f\xc0\x3f\xf0\x3f\xfc\x3f\xf0\x3f\xc0\x3f\x00\x3c\x00\x30\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x0c\x00\x3c\x00\xfc\x03\xfc\x0f\xfc\x3f\xfc\x0f\xfc\x03\xfc\x00\xfc\x00\x3c\x00\x0c\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x03\xc0\x0f\xf0\x3f\xfc\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x3f\xfc\x0f\xf0\x03\xc0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x0f\x0f\x0f\x0f\x0f\x0f\x0f\x0f\x0f\x0f\x0f\x0f\x0f\x0f\x0f\x0f\x00\x00\x0f\x0f\x0f\x0f\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x3f\xff\xf3\xcf\xf3\xcf\xf3\xcf\xf3\xcf\x3f\xcf\x03\xcf\x03\xcf\x03\xcf\x03\xcf\x03\xcf\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x0f\xfc\x3c\x0f\x0f\x00\x03\xf0\x0f\x3c\x3c\x0f\x3c\x0f\x0f\x3c\x03\xf0\x00\x3c\x3c\x0f\x0f\xfc\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x3f\xff\x3f\xff\x3f\xff\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x03\xc0\x0f\xf0\x3f\xfc\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x3f\xfc\x0f\xf0\x03\xc0\x3f\xfc\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x03\xc0\x0f\xf0\x3f\xfc\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x3f\xfc\x0f\xf0\x03\xc0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf0\x00\x3c\x3f\xff\x00\x3c\x00\xf0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\xc0\x0f\x00\x3f\xff\x0f\x00\x03\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x3c\x00\x3c\x00\x3c\x00\x3f\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x30\x3c\x3c\xff\xff\x3c\x3c\x0c\x30\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x01\x80\x03\xc0\x07\xe0\x0f\xf0\x1f\xf8\x3f\xfc\x7f\xfe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x7f\xfe\x3f\xfc\x1f\xf8\x0f\xf0\x07\xe0\x03\xc0\x01\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x03\xc0\x07\xe0\x0f\xf0\x0f\xf0\x0f\xf0\x07\xe0\x03\xc0\x03\xc0\x00\x00\x03\xc0\x03\xc0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x3c\x3c\x3c\x3c\x1c\x38\x0c\x30\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x1c\x38\x1c\x38\x7f\xfe\x1c\x38\x1c\x38\x1c\x38\x1c\x38\x7f\xfe\x1c\x38\x1c\x38\x00\x00\x00\x00\x00\x00'\
b'\x03\xc0\x03\xc0\x07\xe0\x1e\x78\x3c\x3c\x3c\x00\x1e\x00\x07\xe0\x00\x78\x00\x3c\x3c\x3c\x1e\x78\x07\xe0\x03\xc0\x03\xc0\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x3c\x3c\x3c\x78\x00\xf0\x01\xe0\x03\xc0\x07\x80\x0f\x00\x1e\x3c\x3c\x3c\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x07\xc0\x1e\xf0\x3c\x78\x1e\xf0\x07\xc0\x0f\x9e\x3f\xfc\x78\xf8\x78\x78\x3c\xfc\x0f\x9e\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x0f\x00\x0f\x00\x0f\x00\x1e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x01\xe0\x03\xc0\x07\x80\x0f\x00\x0f\x00\x0f\x00\x0f\x00\x0f\x00\x07\x80\x03\xc0\x01\xe0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x01\xe0\x00\xf0\x00\x78\x00\x3c\x00\x3c\x00\x3c\x00\x3c\x00\x3c\x00\x78\x00\xf0\x01\xe0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x3c\x3c\x0f\xf0\x7f\xfe\x0f\xf0\x3c\x3c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x03\xc0\x03\xc0\x03\xc0\x7f\xfe\x03\xc0\x03\xc0\x03\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\xc0\x03\xc0\x03\x80\x07\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x7f\xfe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\xc0\x03\xc0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x3c\x00\x78\x00\xf0\x01\xe0\x03\xc0\x07\x80\x0f\x00\x1e\x00\x3c\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x07\xe0\x1e\x78\x3c\x3c\x3c\x7c\x3c\xfc\x3d\xbc\x3f\x3c\x3e\x3c\x3c\x3c\x1e\x78\x07\xe0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x03\xc0\x0f\xc0\x3f\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x3f\xfc\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x0f\xe0\x3c\x78\x00\x3c\x00\x3c\x00\x78\x00\xf0\x03\xc0\x0f\x00\x1e\x00\x3c\x3c\x3f\xfc\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x0f\xf0\x3c\x3c\x00\x1e\x00\x1e\x00\x3c\x03\xf0\x00\x3c\x00\x1e\x00\x1e\x3c\x3c\x0f\xf0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x01\xf0\x03\xf0\x07\xf0\x0f\xf0\x1e\xf0\x3c\xf0\x3f\xfc\x00\xf0\x00\xf0\x00\xf0\x03\xfc\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x3f\xfe\x3c\x00\x3c\x00\x3c\x00\x3f\xf0\x00\x3c\x00\x1e\x00\x1e\x00\x1e\x3c\x3c\x0f\xf0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x07\xf0\x1e\x00\x3c\x00\x3c\x00\x3c\x00\x3f\xf0\x3c\x3c\x3c\x1e\x3c\x1e\x1e\x3c\x07\xf0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x3f\xfc\x3c\x3c\x00\x3c\x00\x78\x00\xf0\x01\xe0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x07\xf0\x1e\x3c\x3c\x1e\x3c\x1e\x1e\x3c\x07\xf0\x1e\x3c\x3c\x1e\x3c\x1e\x1e\x3c\x07\xf0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x07\xf0\x1e\x3c\x3c\x1e\x3c\x1e\x1e\x1e\x07\xfe\x00\x1e\x00\x1e\x00\x1e\x00\x3c\x0f\xf0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x03\xc0\x03\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x03\xc0\x03\xc0\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x03\xc0\x03\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x03\xc0\x03\xc0\x07\x80\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x01\xe0\x03\xc0\x07\x80\x0f\x00\x1e\x00\x3c\x00\x1e\x00\x0f\x00\x07\x80\x03\xc0\x01\xe0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x3f\xfc\x00\x00\x00\x00\x3f\xfc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x07\x80\x03\xc0\x01\xe0\x00\xf0\x00\x78\x00\x3c\x00\x78\x00\xf0\x01\xe0\x03\xc0\x07\x80\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x07\xe0\x1e\x78\x3c\x3c\x00\x78\x00\xf0\x01\xe0\x03\xc0\x03\xc0\x00\x00\x03\xc0\x03\xc0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x0f\xfc\x3c\x1e\x78\x1e\x79\xfe\x7b\x8e\x7b\x8e\x7b\x8e\x79\xfc\x78\x00\x3c\x00\x0f\xfc\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x03\xc0\x07\xe0\x0f\xf0\x1e\x78\x3c\x3c\x3c\x3c\x3f\xfc\x3c\x3c\x3c\x3c\x3c\x3c\x3c\x3c\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x7f\xf0\x1e\x3c\x1e\x1e\x1e\x1e\x1e\x3c\x1f\xf0\x1e\x3c\x1e\x1e\x1e\x1e\x1e\x3c\x7f\xf0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x07\xf0\x1e\x3c\x3c\x1e\x3c\x00\x3c\x00\x3c\x00\x3c\x00\x3c\x00\x3c\x1e\x1e\x3c\x07\xf0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x7f\xf0\x1e\x3c\x1e\x1e\x1e\x1e\x1e\x1e\x1e\x1e\x1e\x1e\x1e\x1e\x1e\x1e\x1e\x3c\x7f\xf0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x7f\xfe\x1e\x0e\x1e\x06\x1e\x00\x1e\x60\x1f\xe0\x1e\x60\x1e\x00\x1e\x06\x1e\x0e\x7f\xfe\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x7f\xfe\x1e\x0e\x1e\x06\x1e\x00\x1e\x60\x1f\xe0\x1e\x60\x1e\x00\x1e\x00\x1e\x00\x7f\x80\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x07\xf0\x1e\x3c\x3c\x1e\x3c\x00\x3c\x00\x3c\x00\x3c\x7e\x3c\x1e\x3c\x1e\x1e\x3e\x07\xf6\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x3c\x1e\x3c\x1e\x3c\x1e\x3c\x1e\x3c\x1e\x3f\xfe\x3c\x1e\x3c\x1e\x3c\x1e\x3c\x1e\x3c\x1e\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x0f\xf0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x0f\xf0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x03\xfe\x00\x78\x00\x78\x00\x78\x00\x78\x00\x78\x00\x78\x00\x78\x3c\x78\x1f\xf0\x07\xc0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x7e\x3c\x1e\x78\x1e\xf0\x1f\xe0\x1f\xc0\x1f\xc0\x1f\xe0\x1e\xf0\x1e\x78\x1e\x3c\x7e\x1e\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x7f\x80\x1e\x00\x1e\x00\x1e\x00\x1e\x00\x1e\x00\x1e\x00\x1e\x00\x1e\x06\x1e\x0e\x7f\xfe\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x78\x1e\x7c\x3e\x7e\x7e\x7f\xfe\x7b\xde\x79\x9e\x78\x1e\x78\x1e\x78\x1e\x78\x1e\x78\x1e\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x3c\x1e\x3c\x1e\x3e\x1e\x3f\x1e\x3f\x9e\x3d\xde\x3c\xfe\x3c\x7e\x3c\x3e\x3c\x1e\x3c\x1e\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x07\xf0\x1e\x3c\x3c\x1e\x3c\x1e\x3c\x1e\x3c\x1e\x3c\x1e\x3c\x1e\x3c\x1e\x1e\x3c\x07\xf0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x7f\xf0\x1e\x3c\x1e\x1e\x1e\x1e\x1e\x3c\x1f\xf0\x1e\x00\x1e\x00\x1e\x00\x1e\x00\x7f\x80\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x07\xf0\x1e\x3c\x3c\x1e\x3c\x1e\x3c\x1e\x3c\x1e\x3c\x1e\x3d\xde\x3c\xfe\x1e\x7c\x07\xf8\x00\x1c\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x7f\xf0\x1e\x3c\x1e\x1e\x1e\x1e\x1e\x3c\x1f\xf0\x1f\xe0\x1e\xf0\x1e\x78\x1e\x3c\x7e\x1e\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x0f\xf0\x3c\x3c\x78\x1e\x3c\x00\x0f\x00\x03\xc0\x00\xf0\x00\x3c\x78\x1e\x3c\x3c\x0f\xf0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x7f\xfe\x73\xce\x63\xc6\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x0f\xf0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x3c\x1e\x3c\x1e\x3c\x1e\x3c\x1e\x3c\x1e\x3c\x1e\x3c\x1e\x3c\x1e\x3c\x1e\x1e\x3c\x07\xf0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x3c\x3c\x3c\x3c\x3c\x3c\x3c\x3c\x3c\x3c\x3c\x3c\x3c\x3c\x1e\x78\x0f\xf0\x07\xe0\x03\xc0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x78\x1e\x78\x1e\x78\x1e\x78\x1e\x78\x1e\x78\x1e\x79\x9e\x7b\xde\x7f\xfe\x3e\x7c\x1c\x38\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x3c\x3c\x3c\x3c\x1e\x78\x0f\xf0\x07\xe0\x03\xc0\x07\xe0\x0f\xf0\x1e\x78\x3c\x3c\x3c\x3c\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x3c\x3c\x3c\x3c\x3c\x3c\x3c\x3c\x1e\x78\x0f\xf0\x07\xe0\x03\xc0\x03\xc0\x03\xc0\x0f\xf0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x3f\xfc\x38\x3c\x30\x78\x00\xf0\x01\xe0\x03\xc0\x07\x80\x0f\x00\x1e\x0c\x3c\x1c\x3f\xfc\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x0f\xf0\x0f\x00\x0f\x00\x0f\x00\x0f\x00\x0f\x00\x0f\x00\x0f\x00\x0f\x00\x0f\x00\x0f\xf0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x3c\x00\x1e\x00\x0f\x00\x07\x80\x03\xc0\x01\xe0\x00\xf0\x00\x78\x00\x3c\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x0f\xf0\x00\xf0\x00\xf0\x00\xf0\x00\xf0\x00\xf0\x00\xf0\x00\xf0\x00\xf0\x00\xf0\x0f\xf0\x00\x00\x00\x00\x00\x00'\
b'\x03\xc0\x07\xe0\x0f\xf0\x1e\x78\x3c\x3c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x7f\xff\x00\x00'\
b'\x03\xc0\x03\xc0\x01\xe0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0f\xe0\x00\x78\x0f\xf8\x3c\x78\x3c\x78\x3c\x78\x0f\x9e\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x3f\x00\x0f\x00\x0f\x00\x0f\x00\x0f\xf0\x0f\x3c\x0f\x1e\x0f\x1e\x0f\x1e\x0f\x1e\x3c\xf8\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0f\xf8\x3c\x1e\x3c\x00\x3c\x00\x3c\x00\x3c\x1e\x0f\xf8\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x01\xf8\x00\x78\x00\x78\x00\x78\x07\xf8\x1e\x78\x3c\x78\x3c\x78\x3c\x78\x3c\x78\x0f\x9e\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0f\xf8\x3c\x1e\x3c\x1e\x3f\xfe\x3c\x00\x3c\x1e\x0f\xf8\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x03\xf0\x0f\x3c\x0f\x0c\x0f\x00\x0f\x00\x3f\xf0\x0f\x00\x0f\x00\x0f\x00\x0f\x00\x3f\xc0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0f\x9e\x3c\x78\x3c\x78\x3c\x78\x3c\x78\x0f\xf8\x00\x78\x3c\x78\x0f\xe0\x00\x00'\
b'\x00\x00\x00\x00\x3f\x00\x0f\x00\x0f\x00\x0f\x00\x0f\x78\x0f\x9e\x0f\x1e\x0f\x1e\x0f\x1e\x0f\x1e\x3f\x1e\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\xf0\x00\xf0\x00\x00\x03\xf0\x00\xf0\x00\xf0\x00\xf0\x00\xf0\x00\xf0\x03\xfc\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x3c\x00\x3c\x00\x00\x00\xfc\x00\x3c\x00\x3c\x00\x3c\x00\x3c\x00\x3c\x3c\x3c\x1e\x78\x07\xe0\x00\x00'\
b'\x00\x00\x00\x00\x3f\x00\x0f\x00\x0f\x00\x0f\x00\x0f\x1e\x0f\x3c\x0f\x78\x0f\xf0\x0f\x78\x0f\x3c\x3f\x1e\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x03\xf0\x00\xf0\x00\xf0\x00\xf0\x00\xf0\x00\xf0\x00\xf0\x00\xf0\x00\xf0\x00\xf0\x03\xfc\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x7e\x7c\x7f\xfe\x7b\xde\x7b\xde\x7b\xde\x7b\xde\x7b\xde\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x3c\xf8\x0f\x3c\x0f\x1e\x0f\x1e\x0f\x1e\x0f\x1e\x0f\x1e\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\xf0\x1e\x3c\x3c\x1e\x3c\x1e\x3c\x1e\x1e\x3c\x07\xf0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x3c\xf0\x0f\x3c\x0f\x1e\x0f\x1e\x0f\x3c\x0f\xf0\x0f\x00\x0f\x00\x3f\xc0\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\x9e\x1e\x78\x3c\x78\x3c\x78\x1e\x78\x07\xf8\x00\x78\x00\x78\x00\xfe\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x3c\xf8\x0f\x9e\x0f\x00\x0f\x00\x0f\x00\x0f\x00\x3f\xc0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0f\xf8\x3c\x1e\x3c\x00\x0f\xf8\x00\x1e\x3c\x1e\x0f\xf8\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x01\x80\x03\x80\x07\x80\x07\x80\x7f\xf8\x07\x80\x07\x80\x07\x80\x07\x80\x07\x9e\x01\xf8\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x3c\x78\x3c\x78\x3c\x78\x3c\x78\x3c\x78\x3c\x78\x0f\x9e\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x78\x1e\x78\x1e\x78\x1e\x78\x1e\x1e\x78\x07\xe0\x01\x80\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x78\x1e\x78\x1e\x78\x1e\x79\x9e\x7b\xde\x3f\xfc\x1e\x78\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x3c\x3c\x0e\x70\x07\xe0\x03\xc0\x07\xe0\x0e\x70\x3c\x3c\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x3c\x1e\x3c\x1e\x3c\x1e\x3c\x1e\x1e\x1e\x07\xfe\x00\x1e\x00\x3c\x0f\xf0\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x3f\xfc\x3c\x3c\x00\xf0\x03\xc0\x0f\x00\x3c\x3c\x3f\xfc\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\xfc\x01\xe0\x03\xc0\x03\xc0\x03\xc0\x3f\x80\x03\xc0\x03\xc0\x03\xc0\x01\xe0\x00\xfc\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x00\x00\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x3f\x00\x07\x80\x03\xc0\x03\xc0\x03\xc0\x01\xfc\x03\xc0\x03\xc0\x03\xc0\x07\x80\x3f\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x0f\x9e\x3c\xf8\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\xc0\x03\xf0\x0f\x3c\x3c\x0f\x3c\x0f\x3f\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x07\xf8\x1e\x1e\x3c\x06\x3c\x00\x3c\x00\x3c\x00\x3c\x00\x3c\x06\x1e\x1e\x07\xf8\x00\x78\x00\x1e\x0f\xf8\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x3c\x3c\x3c\x3c\x00\x00\x3c\x3c\x3c\x3c\x3c\x3c\x3c\x3c\x3c\x3c\x3c\x3c\x0f\xcf\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x3c\x00\xf0\x03\xc0\x00\x00\x0f\xfc\x3c\x0f\x3c\x0f\x3f\xff\x3c\x00\x3c\x0f\x0f\xfc\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\xc0\x03\xf0\x0f\x3c\x00\x00\x0f\xf0\x00\x3c\x0f\xfc\x3c\x3c\x3c\x3c\x3c\x3c\x0f\xcf\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x3c\x3c\x3c\x3c\x00\x00\x0f\xf0\x00\x3c\x0f\xfc\x3c\x3c\x3c\x3c\x3c\x3c\x0f\xcf\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x0f\x00\x03\xc0\x00\xf0\x00\x00\x0f\xf0\x00\x3c\x0f\xfc\x3c\x3c\x3c\x3c\x3c\x3c\x0f\xcf\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x03\xf0\x0f\x3c\x03\xf0\x00\x00\x0f\xf0\x00\x3c\x0f\xfc\x3c\x3c\x3c\x3c\x3c\x3c\x0f\xcf\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0f\xf0\x3c\x3c\x3c\x00\x3c\x00\x3c\x3c\x0f\xf0\x00\xf0\x00\x3c\x0f\xf0\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\xc0\x03\xf0\x0f\x3c\x00\x00\x0f\xfc\x3c\x0f\x3c\x0f\x3f\xff\x3c\x00\x3c\x0f\x0f\xfc\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x3c\x3c\x3c\x3c\x00\x00\x0f\xfc\x3c\x0f\x3c\x0f\x3f\xff\x3c\x00\x3c\x0f\x0f\xfc\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x0f\x00\x03\xc0\x00\xf0\x00\x00\x0f\xfc\x3c\x0f\x3c\x0f\x3f\xff\x3c\x00\x3c\x0f\x0f\xfc\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x3c\x3c\x3c\x3c\x00\x00\x0f\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x0f\xf0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x03\xc0\x0f\xf0\x3c\x3c\x00\x00\x0f\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x0f\xf0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x3c\x00\x0f\x00\x03\xc0\x00\x00\x0f\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x0f\xf0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x3c\x0f\x3c\x0f\x00\xc0\x03\xf0\x0f\x3c\x3c\x0f\x3c\x0f\x3c\x0f\x3f\xff\x3c\x0f\x3c\x0f\x3c\x0f\x00\x00\x00\x00\x00\x00'\
b'\x03\xf0\x0f\x3c\x03\xf0\x00\x00\x03\xf0\x0f\x3c\x3c\x0f\x3c\x0f\x3c\x0f\x3f\xff\x3c\x0f\x3c\x0f\x3c\x0f\x00\x00\x00\x00\x00\x00'\
b'\x00\xf0\x03\xc0\x0f\x00\x00\x00\x3f\xff\x0f\x0f\x0f\x00\x0f\x00\x0f\xfc\x0f\x00\x0f\x00\x0f\x0f\x3f\xff\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x3f\xfe\x03\xcf\x03\xcf\x7f\xfe\xf3\xc0\xf3\xc0\x7f\xff\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x03\xff\x0f\x3c\x3c\x3c\x3c\x3c\x3c\x3c\x3f\xff\x3c\x3c\x3c\x3c\x3c\x3c\x3c\x3c\x3c\x3f\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x01\x80\x07\xe0\x1e\x78\x00\x00\x07\xf0\x1e\x3c\x3c\x1e\x3c\x1e\x3c\x1e\x1e\x3c\x07\xf0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x3c\x1e\x3c\x1e\x00\x00\x07\xf0\x1e\x3c\x3c\x1e\x3c\x1e\x3c\x1e\x1e\x3c\x07\xf0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x0f\x00\x03\xc0\x00\xf0\x00\x00\x07\xf0\x1e\x3c\x3c\x1e\x3c\x1e\x3c\x1e\x1e\x3c\x07\xf0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x03\xc0\x0f\xf0\x3c\x3c\x00\x00\x3c\x3c\x3c\x3c\x3c\x3c\x3c\x3c\x3c\x3c\x3c\x3c\x0f\xcf\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x0f\x00\x03\xc0\x00\xf0\x00\x00\x3c\x3c\x3c\x3c\x3c\x3c\x3c\x3c\x3c\x3c\x3c\x3c\x0f\xcf\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x3c\x1e\x3c\x1e\x00\x00\x3c\x1e\x3c\x1e\x3c\x1e\x3c\x1e\x1e\x1e\x07\xfe\x00\x1e\x00\x3c\x0f\xf0\x00\x00'\
b'\x00\x00\x3c\x1e\x3c\x1e\x00\x00\x07\xf0\x1e\x3c\x3c\x1e\x3c\x1e\x3c\x1e\x3c\x1e\x3c\x1e\x1e\x3c\x07\xf0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x3c\x1e\x3c\x1e\x00\x00\x3c\x1e\x3c\x1e\x3c\x1e\x3c\x1e\x3c\x1e\x3c\x1e\x3c\x1e\x1e\x3c\x07\xf0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x03\xc0\x03\xc0\x1f\xf8\x78\x1e\x78\x00\x78\x00\x78\x00\x78\x1e\x1f\xf8\x03\xc0\x03\xc0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x03\xf0\x0f\x3c\x0f\x0c\x0f\x00\x3f\xc0\x0f\x00\x0f\x00\x0f\x00\x0f\x00\x3f\x0f\x3f\xfc\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x78\x1e\x1e\x78\x07\xe0\x03\xc0\x03\xc0\x7f\xfe\x03\xc0\x7f\xfe\x03\xc0\x03\xc0\x03\xc0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\xff\xf0\x3c\x3c\x3c\x3c\x3f\xf0\x3c\x0c\x3c\x3c\x3c\xff\x3c\x3c\x3c\x3c\x3c\x3c\xff\x0f\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\xfc\x03\xcf\x03\xc0\x03\xc0\x03\xc0\x3f\xfc\x03\xc0\x03\xc0\x03\xc0\x03\xc0\xf3\xc0\x3f\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\xf0\x03\xc0\x0f\x00\x00\x00\x0f\xf0\x00\x3c\x0f\xfc\x3c\x3c\x3c\x3c\x3c\x3c\x0f\xcf\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\xf0\x03\xc0\x0f\x00\x00\x00\x0f\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x0f\xf0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\xf0\x03\xc0\x0f\x00\x00\x00\x07\xf0\x1e\x3c\x3c\x1e\x3c\x1e\x3c\x1e\x1e\x3c\x07\xf0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\xf0\x03\xc0\x0f\x00\x00\x00\x3c\x3c\x3c\x3c\x3c\x3c\x3c\x3c\x3c\x3c\x3c\x3c\x0f\xcf\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x1f\x9e\x79\xf8\x00\x00\x79\xf8\x1e\x3c\x1e\x1e\x1e\x1e\x1e\x1e\x1e\x1e\x1e\x1e\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x1f\x9e\x79\xf8\x00\x00\x78\x1e\x78\x1e\x7e\x1e\x7f\x9e\x7f\xfe\x79\xfe\x78\x7e\x78\x1e\x78\x1e\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x0f\xf0\x3c\xf0\x3c\xf0\x0f\xfc\x00\x00\x3f\xfc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x0f\xc0\x3c\xf0\x3c\xf0\x0f\xc0\x00\x00\x3f\xf0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x03\xc0\x03\xc0\x00\x00\x03\xc0\x03\xc0\x03\xc0\x07\x80\x1e\x00\x3c\x1e\x1e\x3c\x07\xf0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x3f\xff\x3c\x00\x3c\x00\x3c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x3f\xff\x00\x0f\x00\x0f\x00\x0f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x3c\x00\xfc\x00\x3c\x0f\x3c\x3c\x3c\xf0\x03\xc0\x0f\x00\x3c\xfc\xf0\x0f\x00\x3c\x00\xf0\x03\xff\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x3c\x00\xfc\x00\x3c\x0f\x3c\x3c\x3c\xf0\x03\xc0\x0f\x0f\x3c\x3f\xf0\xe7\x03\xff\x00\x0f\x00\x0f\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x03\xc0\x03\xc0\x00\x00\x03\xc0\x03\xc0\x07\xe0\x0f\xf0\x0f\xf0\x0f\xf0\x07\xe0\x03\xc0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\xcf\x0f\x3c\x3c\xf0\x0f\x3c\x03\xcf\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x3c\xf0\x0f\x3c\x03\xcf\x0f\x3c\x3c\xf0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x03\x03\x30\x30\x03\x03\x30\x30\x03\x03\x30\x30\x03\x03\x30\x30\x03\x03\x30\x30\x03\x03\x30\x30\x03\x03\x30\x30\x03\x03\x30\x30'\
b'\x33\x33\xcc\xcc\x33\x33\xcc\xcc\x33\x33\xcc\xcc\x33\x33\xcc\xcc\x33\x33\xcc\xcc\x33\x33\xcc\xcc\x33\x33\xcc\xcc\x33\x33\xcc\xcc'\
b'\xf3\xf3\x3f\x3f\xf3\xf3\x3f\x3f\xf3\xf3\x3f\x3f\xf3\xf3\x3f\x3f\xf3\xf3\x3f\x3f\xf3\xf3\x3f\x3f\xf3\xf3\x3f\x3f\xf3\xf3\x3f\x3f'\
b'\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0'\
b'\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\xff\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0'\
b'\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\xff\xc0\x03\xc0\xff\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0'\
b'\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\xff\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xfc\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xc0\x03\xc0\xff\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0'\
b'\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\xff\x3c\x00\x3c\xff\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c'\
b'\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xfc\x00\x3c\xff\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c'\
b'\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\xff\x3c\x00\x3c\xff\xfc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\xff\xfc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\xff\xc0\x03\xc0\xff\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0'\
b'\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0'\
b'\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xff\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\xff\xff\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0'\
b'\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xff\x03\xc0\x03\xff\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0'\
b'\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3f\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c'\
b'\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3f\x0f\x00\x0f\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0f\xff\x0f\x00\x0f\x3f\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c'\
b'\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\xff\x3f\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\x3f\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c'\
b'\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3f\x0f\x00\x0f\x3f\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\xff\x3f\x00\x00\xff\x3f\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c'\
b'\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c'\
b'\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xff\x03\xc0\x03\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\xff\x03\xc0\x03\xff\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0f\xff\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c'\
b'\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\xff\xff\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c\x0f\x3c'\
b'\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\xff\xff\x03\xc0\xff\xff\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0'\
b'\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\xff\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\xff\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'\
b'\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00'\
b'\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff'\
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\x9e\x1f\xfc\x3c\xf8\x3c\xf0\x3c\xf8\x1f\xfc\x07\x9e\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x0f\xf8\x3c\x1e\x3c\x1e\x3f\xf8\x3c\x1e\x3c\x1e\x3f\xf8\x3c\x00\x3c\x00\x0c\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x3f\xfe\x3c\x1e\x3c\x1e\x3c\x00\x3c\x00\x3c\x00\x3c\x00\x3c\x00\x3c\x00\x3c\x00\x3c\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x7f\xfe\x1e\x78\x1e\x78\x1e\x78\x1e\x78\x1e\x78\x1e\x78\x1e\x78\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x3f\xfe\x3c\x1e\x3c\x00\x0f\x00\x03\xc0\x00\xf0\x03\xc0\x0f\x00\x3c\x00\x3c\x1e\x3f\xfe\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\xfe\x1e\x78\x3c\x78\x3c\x78\x3c\x78\x1e\xf0\x07\xc0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0f\x1e\x0f\x1e\x0f\x1e\x0f\x1e\x0f\x1e\x0f\xf8\x0f\x00\x0f\x00\x3c\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0f\xcf\x3c\xfc\x00\xf0\x00\xf0\x00\xf0\x00\xf0\x00\xf0\x00\xf0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x3f\xfc\x03\xc0\x0f\xf0\x3c\x3c\x3c\x3c\x3c\x3c\x3c\x3c\x3c\x3c\x0f\xf0\x03\xc0\x3f\xfc\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x07\xf0\x1e\x3c\x3c\x1e\x3c\x1e\x3c\x1e\x3f\xfe\x3c\x1e\x3c\x1e\x3c\x1e\x1e\x3c\x07\xf0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x0f\xf0\x3c\x3c\x78\x1e\x78\x1e\x78\x1e\x78\x1e\x3c\x3c\x1e\x78\x1e\x78\x1e\x78\x7e\x7e\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x03\xfc\x0f\x00\x0f\x00\x03\xc0\x00\xf0\x07\xfc\x1e\x3c\x3c\x3c\x3c\x3c\x1e\x78\x07\xe0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x3f\xfc\xf3\xcf\xf3\xcf\x3f\xfc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x1c\x00\x38\x0f\xf0\x3c\xfc\x79\xde\x7b\x9e\x3f\x3c\x0f\xf0\x1c\x00\x38\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x03\xf0\x0f\x00\x3c\x00\x3c\x00\x3c\x00\x3f\xf0\x3c\x00\x3c\x00\x3c\x00\x0f\x00\x03\xf0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x07\xf0\x1e\x3c\x3c\x1e\x3c\x1e\x3c\x1e\x3c\x1e\x3c\x1e\x3c\x1e\x3c\x1e\x3c\x1e\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x3f\xfe\x00\x00\x00\x00\x3f\xfe\x00\x00\x00\x00\x3f\xfe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x03\xc0\x03\xc0\x03\xc0\x7f\xfe\x03\xc0\x03\xc0\x03\xc0\x00\x00\x7f\xfe\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x0f\x00\x03\xc0\x00\xf0\x00\x3c\x00\xf0\x03\xc0\x0f\x00\x00\x00\x3f\xfc\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\xf0\x03\xc0\x0f\x00\x3c\x00\x0f\x00\x03\xc0\x00\xf0\x00\x00\x3f\xfc\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\xfc\x03\xcf\x03\xcf\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0'\
b'\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\x03\xc0\xf3\xc0\xf3\xc0\x3f\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x03\xc0\x03\xc0\x00\x00\x00\x00\x7f\xfe\x00\x00\x00\x00\x03\xc0\x03\xc0\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0f\xcf\x3c\xfc\x00\x00\x0f\xcf\x3c\xfc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x0f\xc0\x3c\xf0\x3c\xf0\x0f\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\xc0\x03\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\xff\x00\xf0\x00\xf0\x00\xf0\x00\xf0\x00\xf0\x00\xf0\xfc\xf0\x3c\xf0\x0f\xf0\x03\xf0\x00\xf0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\xf3\xc0\x3c\xf0\x3c\xf0\x3c\xf0\x3c\xf0\x3c\xf0\x3c\xf0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x3f\x00\xf3\xc0\x03\xc0\x0f\x00\x3c\x00\xf0\xc0\xff\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0f\xfc\x0f\xfc\x0f\xfc\x0f\xfc\x0f\xfc\x0f\xfc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
FONT = memoryview(_FONT)
| 128.304511
| 132
| 0.729116
| 8,460
| 34,129
| 2.941135
| 0.006856
| 0.698336
| 0.871353
| 0.955872
| 0.945422
| 0.933124
| 0.913713
| 0.903545
| 0.880637
| 0.862993
| 0
| 0.366448
| 0.008116
| 34,129
| 265
| 133
| 128.788679
| 0.368575
| 0
| 0
| 0.007634
| 0
| 0.977099
| 0.960178
| 0.960178
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 16
|
d5d8c0ca531aac6ef633c8e6f1ca3ffbb03bd739
| 12,481
|
py
|
Python
|
test/test_galkin.py
|
LBJ-Wade/galkin
|
35dc2a190df74716544831887c8f2f8334b36d8f
|
[
"MIT"
] | 1
|
2018-04-02T07:45:03.000Z
|
2018-04-02T07:45:03.000Z
|
test/test_galkin.py
|
LBJ-Wade/galkin
|
35dc2a190df74716544831887c8f2f8334b36d8f
|
[
"MIT"
] | 1
|
2017-10-16T20:29:18.000Z
|
2017-10-17T01:28:16.000Z
|
test/test_galkin.py
|
sibirrer/galkin
|
35dc2a190df74716544831887c8f2f8334b36d8f
|
[
"MIT"
] | null | null | null |
"""
Tests for `galkin` module.
"""
import pytest
import numpy.testing as npt
import numpy as np
import scipy.integrate as integrate
from galkin.galkin_old import GalKin_old
from galkin.galkin import Galkin
from galkin.light_profile import LightProfile
from galkin.LOS_dispersion import Velocity_dispersion
class TestGalkin(object):
    """Integration-style tests for the `galkin` kinematics package.

    Cross-checks the refactored `Galkin` implementation against the legacy
    `GalKin_old` / `Velocity_dispersion` code paths, and validates the 2D
    projected light integrals of several light profiles against direct
    numerical line-of-sight integration with `scipy.integrate.quad`.

    NOTE: the original file used Python-2 `print` statements; they have been
    converted to `print()` calls so the module imports under Python 3.
    """

    def setup(self):
        # pytest per-test setup hook; nothing to initialise.
        pass

    def test_galkin_vs_LOS_dispersion(self):
        """
        Tests whether the old and new version provide the same answer
        (relative agreement of the velocity dispersion to ~1%).

        :return:
        """
        # light profile
        light_profile = 'Hernquist'
        r_eff = 0.5
        kwargs_light = {'r_eff': r_eff}  # effective half light radius (2d projected) in arcsec
        # mass profile
        mass_profile = 'power_law'
        theta_E = 1.2
        gamma = 2.
        kwargs_profile = {'theta_E': theta_E, 'gamma': gamma}  # Einstein radius (arcsec) and power-law slope
        # anisotropy profile
        anisotropy_type = 'r_ani'
        r_ani = 0.5
        kwargs_anisotropy = {'r_ani': r_ani}  # anisotropy radius [arcsec]
        # aperture as shell (kept for reference)
        #aperture_type = 'shell'
        #kwargs_aperture_inner = {'r_in': 0., 'r_out': 0.2, 'center_dec': 0, 'center_ra': 0}
        #kwargs_aperture_outer = {'r_in': 0., 'r_out': 1.5, 'center_dec': 0, 'center_ra': 0}
        # aperture as slit
        aperture_type = 'slit'
        length = 3.8
        width = 0.9
        kwargs_aperture = {'length': length, 'width': width, 'center_ra': 0, 'center_dec': 0, 'angle': 0}
        psf_fwhm = 0.1  # Gaussian FWHM psf
        kwargs_cosmo = {'D_d': 1000, 'D_s': 1500, 'D_ds': 800}
        galkin = GalKin_old(aperture=aperture_type, mass_profile=mass_profile, light_profile=light_profile,
                            anisotropy_type=anisotropy_type, psf_fwhm=psf_fwhm, kwargs_cosmo=kwargs_cosmo)
        sigma_v = galkin.vel_disp(kwargs_profile, kwargs_aperture, kwargs_light, kwargs_anisotropy, num=1000)
        los_disp = Velocity_dispersion(beta_const=False, b_prior=False, kwargs_cosmo=kwargs_cosmo)
        sigma_v2 = los_disp.vel_disp(gamma, theta_E, r_eff, aniso_param=r_ani, R_slit=length, dR_slit=width,
                                     FWHM=psf_fwhm, num=1000)
        # relative difference should vanish to 2 decimals
        npt.assert_almost_equal((sigma_v - sigma_v2) / sigma_v2, 0, decimal=2)

    def test_log_linear_integral(self):
        """
        Compares the projected dispersion integrand I(R) * sigma^2 computed
        with linear vs. logarithmic radial integration grids.

        :return:
        """
        # light profile
        light_profile_list = ['HERNQUIST']
        r_eff = .5
        kwargs_light = [{'Rs': r_eff, 'sigma0': 1.}]  # effective half light radius (2d projected) in arcsec
        # mass profile
        mass_profile_list = ['SPP']
        theta_E = 1.2
        gamma = 2.
        kwargs_profile = [{'theta_E': theta_E, 'gamma': gamma}]  # Einstein radius (arcsec) and power-law slope
        # anisotropy profile
        anisotropy_type = 'OsipkovMerritt'
        r_ani = 2.
        kwargs_anisotropy = {'r_ani': r_ani}  # anisotropy radius [arcsec]
        # aperture as slit
        aperture_type = 'slit'
        psf_fwhm = 0.7  # Gaussian FWHM psf
        kwargs_cosmo = {'D_d': 1000, 'D_s': 1500, 'D_ds': 800}
        kwargs_numerics_linear = {'sampling_number': 5000, 'interpol_grid_num': 5000, 'log_integration': False,
                                  'max_integrate': 50}
        kwargs_numerics_log = {'sampling_number': 5000, 'interpol_grid_num': 5000, 'log_integration': True,
                               'max_integrate': 50}
        galkin_linear = Galkin(mass_profile_list, light_profile_list, aperture_type=aperture_type,
                               anisotropy_model=anisotropy_type, fwhm=psf_fwhm, kwargs_cosmo=kwargs_cosmo, kwargs_numerics=kwargs_numerics_linear)
        galkin_log = Galkin(mass_profile_list, light_profile_list, aperture_type=aperture_type,
                            anisotropy_model=anisotropy_type, fwhm=psf_fwhm, kwargs_cosmo=kwargs_cosmo, kwargs_numerics=kwargs_numerics_log)
        R = np.linspace(0.05, 1, 100)
        lin_I_R = np.zeros_like(R)
        log_I_R = np.zeros_like(R)
        # NOTE: `I_R_simga2` is the (misspelled) public API name of Galkin — do not "fix" it here.
        for i in range(len(R)):
            lin_I_R[i] = galkin_linear.I_R_simga2(R[i], kwargs_profile, kwargs_light, kwargs_anisotropy)
            log_I_R[i] = galkin_log.I_R_simga2(R[i], kwargs_profile, kwargs_light, kwargs_anisotropy)
        print(log_I_R / lin_I_R)
        for i in range(len(R)):
            npt.assert_almost_equal(log_I_R[i] / lin_I_R[i], 1, decimal=2)

    def test_log_vs_linear_integral(self):
        """
        Compares the full aperture-averaged velocity dispersion obtained with
        log vs. linear integration settings.

        :return:
        """
        # light profile
        light_profile_list = ['HERNQUIST']
        r_eff = .5
        kwargs_light = [{'Rs': r_eff, 'sigma0': 1.}]  # effective half light radius (2d projected) in arcsec
        # mass profile
        mass_profile_list = ['SPP']
        theta_E = 1.2
        gamma = 2.
        kwargs_profile = [{'theta_E': theta_E, 'gamma': gamma}]  # Einstein radius (arcsec) and power-law slope
        # anisotropy profile
        anisotropy_type = 'OsipkovMerritt'
        r_ani = 2.
        kwargs_anisotropy = {'r_ani': r_ani}  # anisotropy radius [arcsec]
        # aperture as slit
        aperture_type = 'slit'
        length = 3.8
        width = 0.9
        kwargs_aperture = {'length': length, 'width': width, 'center_ra': 0, 'center_dec': 0, 'angle': 0}
        psf_fwhm = 0.7  # Gaussian FWHM psf
        kwargs_cosmo = {'D_d': 1000, 'D_s': 1500, 'D_ds': 800}
        kwargs_numerics_log = {'sampling_number': 5000, 'interpol_grid_num': 5000, 'log_integration': True,
                               'max_integrate': 100}
        kwargs_numerics_linear = {'sampling_number': 5000, 'interpol_grid_num': 5000, 'log_integration': False,
                                  'max_integrate': 100}
        galkin_linear = Galkin(mass_profile_list, light_profile_list, aperture_type=aperture_type,
                               anisotropy_model=anisotropy_type, fwhm=psf_fwhm, kwargs_cosmo=kwargs_cosmo, kwargs_numerics=kwargs_numerics_linear)
        sigma_v = galkin_linear.vel_disp(kwargs_profile, kwargs_light, kwargs_anisotropy, kwargs_aperture)
        galkin_log = Galkin(mass_profile_list, light_profile_list, aperture_type=aperture_type,
                            anisotropy_model=anisotropy_type, fwhm=psf_fwhm, kwargs_cosmo=kwargs_cosmo, kwargs_numerics=kwargs_numerics_log)
        sigma_v2 = galkin_log.vel_disp(kwargs_profile, kwargs_light, kwargs_anisotropy, kwargs_aperture)
        # was a Python-2 print statement; converted to print() for Python 3
        print(sigma_v, sigma_v2, 'sigma_v linear, sigma_v log')
        print((sigma_v / sigma_v2) ** 2)
        npt.assert_almost_equal(sigma_v / sigma_v2, 1, decimal=2)

    def test_compare_power_law(self):
        """
        Compare power-law profiles analytical vs. numerical.

        :return:
        """
        # light profile
        light_profile_list = ['HERNQUIST']
        r_eff = .5
        kwargs_light = [{'Rs': r_eff, 'sigma0': 1.}]  # effective half light radius (2d projected) in arcsec
        # mass profile
        mass_profile_list = ['SPP']
        theta_E = 1.2
        gamma = 2.
        kwargs_profile = [{'theta_E': theta_E, 'gamma': gamma}]  # Einstein radius (arcsec) and power-law slope
        # anisotropy profile
        anisotropy_type = 'OsipkovMerritt'
        r_ani = 2.
        kwargs_anisotropy = {'r_ani': r_ani}  # anisotropy radius [arcsec]
        # aperture as slit
        aperture_type = 'slit'
        length = 1.
        width = 0.3
        kwargs_aperture = {'length': length, 'width': width, 'center_ra': 0, 'center_dec': 0, 'angle': 0}
        psf_fwhm = 1.  # Gaussian FWHM psf
        kwargs_cosmo = {'D_d': 1000, 'D_s': 1500, 'D_ds': 800}
        kwargs_numerics = {'sampling_number': 5000, 'interpol_grid_num': 5000, 'log_integration': False,
                           'max_integrate': 500}
        galkin = Galkin(mass_profile_list, light_profile_list, aperture_type=aperture_type,
                        anisotropy_model=anisotropy_type, fwhm=psf_fwhm, kwargs_cosmo=kwargs_cosmo, kwargs_numerics=kwargs_numerics)
        sigma_v = galkin.vel_disp(kwargs_profile, kwargs_light, kwargs_anisotropy, kwargs_aperture)
        los_disp = Velocity_dispersion(beta_const=False, b_prior=False, kwargs_cosmo=kwargs_cosmo)
        sigma_v2 = los_disp.vel_disp(gamma, theta_E, r_eff, aniso_param=r_ani, R_slit=length, dR_slit=width,
                                     FWHM=psf_fwhm, num=5000)
        print(sigma_v, sigma_v2, 'sigma_v Galkin, sigma_v los dispersion')
        npt.assert_almost_equal(sigma_v2 / sigma_v, 1, decimal=2)

    def _check_projected_light(self, light_profile_list, kwargs_light, R, r_max=100, decimal=3):
        """Private helper: check that light_2d(R) equals twice the numerical
        line-of-sight integral of light_3d(sqrt(R^2 + x^2)) from 0 to r_max."""
        lightProfile = LightProfile(light_profile_list)
        light2d = lightProfile.light_2d(R=R, kwargs_list=kwargs_light)
        out = integrate.quad(lambda x: lightProfile.light_3d(np.sqrt(R**2 + x**2), kwargs_light), 0, r_max)
        print(out, 'out')
        # factor 2: quad integrates only the half-line in front of the plane
        npt.assert_almost_equal(light2d / (out[0] * 2), 1., decimal=decimal)

    def test_projected_light_integral_hernquist(self):
        """
        Projected light integral of a spherical Hernquist profile.

        :return:
        """
        r_eff = 1.
        kwargs_light = [{'Rs': r_eff, 'sigma0': 1.}]  # effective half light radius (2d projected) in arcsec
        self._check_projected_light(['HERNQUIST'], kwargs_light, R=2)

    def test_projected_light_integral_hernquist_ellipse(self):
        """
        Projected light integral of an elliptical Hernquist profile.

        :return:
        """
        r_eff = 1.
        kwargs_light = [{'Rs': r_eff, 'sigma0': 1., 'q': 0.8, 'phi_G': 1.}]  # effective half light radius (2d projected) in arcsec
        self._check_projected_light(['HERNQUIST_ELLIPSE'], kwargs_light, R=2, r_max=10)

    def test_projected_light_integral_pjaffe(self):
        """
        Projected light integral of a PJaffe profile.

        :return:
        """
        kwargs_light = [{'Rs': .5, 'Ra': 0.01, 'sigma0': 1.}]  # effective half light radius (2d projected) in arcsec
        self._check_projected_light(['PJAFFE'], kwargs_light, R=0.01)

    def test_realistic_0(self):
        """
        Realistic test example (Hernquist with fitted parameters).

        :return:
        """
        kwargs_light = [{'Rs': 0.10535462602138289, 'center_x': -0.02678473951679429, 'center_y': 0.88691126347462712, 'sigma0': 3.7114695634960109}]
        self._check_projected_light(['HERNQUIST'], kwargs_light, R=0.01)

    def test_realistic_1(self):
        """
        Realistic test example (elliptical Hernquist with fitted parameters).

        :return:
        """
        kwargs_light = [{'Rs': 0.10535462602138289, 'q': 0.46728323131925864, 'center_x': -0.02678473951679429, 'center_y': 0.88691126347462712, 'phi_G': 0.74260706384506325, 'sigma0': 3.7114695634960109}]
        self._check_projected_light(['HERNQUIST_ELLIPSE'], kwargs_light, R=0.01)

    def test_realistic(self):
        """
        Realistic test example (two-component Hernquist + PJaffe light model).

        :return:
        """
        kwargs_light = [{'Rs': 0.10535462602138289, 'q': 0.46728323131925864, 'center_x': -0.02678473951679429, 'center_y': 0.88691126347462712, 'phi_G': 0.74260706384506325, 'sigma0': 3.7114695634960109}, {'Rs': 0.44955054610388684, 'q': 0.66582356813012267, 'center_x': 0.019536801118136753, 'center_y': 0.0218888643537157, 'Ra': 0.0010000053334891974, 'phi_G': -0.33379268413794494, 'sigma0': 967.00280526319796}]
        self._check_projected_light(['HERNQUIST_ELLIPSE', 'PJAFFE_ELLIPSE'], kwargs_light, R=0.01)
# Allow running this test module directly; delegates to pytest's collector.
if __name__ == '__main__':
    pytest.main()
| 46.225926
| 416
| 0.640974
| 1,626
| 12,481
| 4.624846
| 0.109471
| 0.044681
| 0.042553
| 0.026596
| 0.842819
| 0.813697
| 0.788564
| 0.773537
| 0.745745
| 0.738431
| 0
| 0.079326
| 0.24349
| 12,481
| 270
| 417
| 46.225926
| 0.717115
| 0.097188
| 0
| 0.610169
| 0
| 0
| 0.091597
| 0
| 0
| 0
| 0
| 0
| 0.056497
| 0
| null | null | 0.00565
| 0.045198
| null | null | 0.045198
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d5ed2cb5c1f89171e8b4b2e5b0c11c6fb3d0791d
| 16,181
|
py
|
Python
|
msgraph/cli/command_modules/notes/azext_notes/generated/_client_factory.py
|
microsoftgraph/msgraph-cli-archived
|
489f70bf4ede1ce67b84bfb31e66da3e4db76062
|
[
"MIT"
] | null | null | null |
msgraph/cli/command_modules/notes/azext_notes/generated/_client_factory.py
|
microsoftgraph/msgraph-cli-archived
|
489f70bf4ede1ce67b84bfb31e66da3e4db76062
|
[
"MIT"
] | 22
|
2022-03-29T22:54:37.000Z
|
2022-03-29T22:55:27.000Z
|
msgraph/cli/command_modules/notes/azext_notes/generated/_client_factory.py
|
microsoftgraph/msgraph-cli-archived
|
489f70bf4ede1ce67b84bfb31e66da3e4db76062
|
[
"MIT"
] | null | null | null |
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
def cf_notes_cl(cli_ctx, *_):
    """Return the root Notes service client for the given CLI context.

    All other factories in this module delegate here and then select one of
    the client's operation groups.
    """
    # Imports are deferred to call time, matching azure-cli client-factory
    # conventions (keeps CLI startup fast).
    from azure.cli.core.commands.client_factory import get_mgmt_service_client
    from azext_notes.vendored_sdks.notes import Notes

    # Microsoft Graph endpoints are neither subscription-bound nor
    # base-URL-bound, so both bindings are disabled.
    return get_mgmt_service_client(
        cli_ctx,
        Notes,
        subscription_bound=False,
        base_url_bound=False,
    )
# --- Group-scoped OneNote client factories -----------------------------------
# AutoRest-generated: the header of this file warns that hand edits are lost
# on regeneration, so the code below is intentionally left byte-identical.
# Each factory builds the root Notes client via cf_notes_cl and returns the
# operations group whose name mirrors the resource path in the function name.
def cf_group(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).groups
def cf_group_onenote(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).groups_onenote
def cf_group_onenote_notebook(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).groups_onenote_notebooks
def cf_group_onenote_notebook_section_group(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).groups_onenote_notebooks_section_groups
def cf_group_onenote_notebook_section_group_section(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).groups_onenote_notebooks_section_groups_sections
def cf_group_onenote_notebook_section_group_section_page(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).groups_onenote_notebooks_section_groups_sections_pages
def cf_group_onenote_notebook_section(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).groups_onenote_notebooks_sections
def cf_group_onenote_notebook_section_page(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).groups_onenote_notebooks_sections_pages
def cf_group_onenote_notebook_section_parent_section_group(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).groups_onenote_notebooks_sections_parent_section_group
def cf_group_onenote_page(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).groups_onenote_pages
def cf_group_onenote_page_parent_notebook(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).groups_onenote_pages_parent_notebook
def cf_group_onenote_page_parent_notebook_section_group(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).groups_onenote_pages_parent_notebook_section_groups
def cf_group_onenote_page_parent_notebook_section_group_section(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).groups_onenote_pages_parent_notebook_section_groups_sections
def cf_group_onenote_page_parent_notebook_section(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).groups_onenote_pages_parent_notebook_sections
def cf_group_onenote_page_parent_notebook_section_parent_section_group(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).groups_onenote_pages_parent_notebook_sections_parent_section_group
def cf_group_onenote_page_parent_section(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).groups_onenote_pages_parent_section
def cf_group_onenote_page_parent_section_parent_notebook(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).groups_onenote_pages_parent_section_parent_notebook
def cf_group_onenote_page_parent_section_parent_notebook_section_group(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).groups_onenote_pages_parent_section_parent_notebook_section_groups
def cf_group_onenote_page_parent_section_parent_section_group(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).groups_onenote_pages_parent_section_parent_section_group
def cf_group_onenote_page_parent_section_parent_section_group_parent_notebook(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).groups_onenote_pages_parent_section_parent_section_group_parent_notebook
def cf_group_onenote_section_group(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).groups_onenote_section_groups
def cf_group_onenote_section_group_parent_notebook(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).groups_onenote_section_groups_parent_notebook
def cf_group_onenote_section_group_parent_notebook_section(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).groups_onenote_section_groups_parent_notebook_sections
def cf_group_onenote_section_group_parent_notebook_section_page(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).groups_onenote_section_groups_parent_notebook_sections_pages
def cf_group_onenote_section_group_section(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).groups_onenote_section_groups_sections
def cf_group_onenote_section_group_section_page(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).groups_onenote_section_groups_sections_pages
def cf_group_onenote_section_group_section_page_parent_notebook(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).groups_onenote_section_groups_sections_pages_parent_notebook
def cf_group_onenote_section_group_section_parent_notebook(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).groups_onenote_section_groups_sections_parent_notebook
def cf_group_onenote_section(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).groups_onenote_sections
def cf_group_onenote_section_page(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).groups_onenote_sections_pages
def cf_group_onenote_section_page_parent_notebook(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).groups_onenote_sections_pages_parent_notebook
def cf_group_onenote_section_page_parent_notebook_section_group(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).groups_onenote_sections_pages_parent_notebook_section_groups
def cf_group_onenote_section_parent_notebook(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).groups_onenote_sections_parent_notebook
def cf_group_onenote_section_parent_notebook_section_group(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).groups_onenote_sections_parent_notebook_section_groups
def cf_group_onenote_section_parent_section_group(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).groups_onenote_sections_parent_section_group
def cf_group_onenote_section_parent_section_group_parent_notebook(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).groups_onenote_sections_parent_section_group_parent_notebook
# --- Site-scoped OneNote client factories ------------------------------------
# AutoRest-generated: left byte-identical (regeneration overwrites hand edits).
# Same pattern as the group-scoped factories, rooted at the `sites` resource.
def cf_site(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).sites
def cf_site_onenote(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).sites_onenote
def cf_site_onenote_notebook(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).sites_onenote_notebooks
def cf_site_onenote_notebook_section_group(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).sites_onenote_notebooks_section_groups
def cf_site_onenote_notebook_section_group_section(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).sites_onenote_notebooks_section_groups_sections
def cf_site_onenote_notebook_section_group_section_page(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).sites_onenote_notebooks_section_groups_sections_pages
def cf_site_onenote_notebook_section(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).sites_onenote_notebooks_sections
def cf_site_onenote_notebook_section_page(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).sites_onenote_notebooks_sections_pages
def cf_site_onenote_notebook_section_parent_section_group(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).sites_onenote_notebooks_sections_parent_section_group
def cf_site_onenote_page(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).sites_onenote_pages
def cf_site_onenote_page_parent_notebook(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).sites_onenote_pages_parent_notebook
def cf_site_onenote_page_parent_notebook_section_group(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).sites_onenote_pages_parent_notebook_section_groups
def cf_site_onenote_page_parent_notebook_section_group_section(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).sites_onenote_pages_parent_notebook_section_groups_sections
def cf_site_onenote_page_parent_notebook_section(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).sites_onenote_pages_parent_notebook_sections
def cf_site_onenote_page_parent_notebook_section_parent_section_group(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).sites_onenote_pages_parent_notebook_sections_parent_section_group
def cf_site_onenote_page_parent_section(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).sites_onenote_pages_parent_section
def cf_site_onenote_page_parent_section_parent_notebook(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).sites_onenote_pages_parent_section_parent_notebook
def cf_site_onenote_page_parent_section_parent_notebook_section_group(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).sites_onenote_pages_parent_section_parent_notebook_section_groups
def cf_site_onenote_page_parent_section_parent_section_group(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).sites_onenote_pages_parent_section_parent_section_group
def cf_site_onenote_page_parent_section_parent_section_group_parent_notebook(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).sites_onenote_pages_parent_section_parent_section_group_parent_notebook
def cf_site_onenote_section_group(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).sites_onenote_section_groups
def cf_site_onenote_section_group_parent_notebook(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).sites_onenote_section_groups_parent_notebook
def cf_site_onenote_section_group_parent_notebook_section(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).sites_onenote_section_groups_parent_notebook_sections
def cf_site_onenote_section_group_parent_notebook_section_page(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).sites_onenote_section_groups_parent_notebook_sections_pages
def cf_site_onenote_section_group_section(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).sites_onenote_section_groups_sections
def cf_site_onenote_section_group_section_page(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).sites_onenote_section_groups_sections_pages
def cf_site_onenote_section_group_section_page_parent_notebook(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).sites_onenote_section_groups_sections_pages_parent_notebook
def cf_site_onenote_section_group_section_parent_notebook(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).sites_onenote_section_groups_sections_parent_notebook
def cf_site_onenote_section(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).sites_onenote_sections
def cf_site_onenote_section_page(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).sites_onenote_sections_pages
def cf_site_onenote_section_page_parent_notebook(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).sites_onenote_sections_pages_parent_notebook
def cf_site_onenote_section_page_parent_notebook_section_group(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).sites_onenote_sections_pages_parent_notebook_section_groups
def cf_site_onenote_section_parent_notebook(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).sites_onenote_sections_parent_notebook
def cf_site_onenote_section_parent_notebook_section_group(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).sites_onenote_sections_parent_notebook_section_groups
def cf_site_onenote_section_parent_section_group(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).sites_onenote_sections_parent_section_group
def cf_site_onenote_section_parent_section_group_parent_notebook(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).sites_onenote_sections_parent_section_group_parent_notebook
# --- User-scoped OneNote client factories ------------------------------------
# AutoRest-generated: left byte-identical (regeneration overwrites hand edits).
# Same pattern as the group-scoped factories, rooted at the `users` resource.
def cf_user(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).users
def cf_user_onenote(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).users_onenote
def cf_user_onenote_notebook(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).users_onenote_notebooks
def cf_user_onenote_notebook_section_group(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).users_onenote_notebooks_section_groups
def cf_user_onenote_notebook_section_group_section(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).users_onenote_notebooks_section_groups_sections
def cf_user_onenote_notebook_section_group_section_page(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).users_onenote_notebooks_section_groups_sections_pages
def cf_user_onenote_notebook_section(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).users_onenote_notebooks_sections
def cf_user_onenote_notebook_section_page(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).users_onenote_notebooks_sections_pages
def cf_user_onenote_notebook_section_parent_section_group(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).users_onenote_notebooks_sections_parent_section_group
def cf_user_onenote_page(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).users_onenote_pages
def cf_user_onenote_page_parent_notebook(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).users_onenote_pages_parent_notebook
def cf_user_onenote_page_parent_notebook_section_group(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).users_onenote_pages_parent_notebook_section_groups
def cf_user_onenote_page_parent_notebook_section_group_section(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).users_onenote_pages_parent_notebook_section_groups_sections
def cf_user_onenote_page_parent_notebook_section(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).users_onenote_pages_parent_notebook_sections
def cf_user_onenote_page_parent_notebook_section_parent_section_group(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).users_onenote_pages_parent_notebook_sections_parent_section_group
def cf_user_onenote_page_parent_section(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).users_onenote_pages_parent_section
def cf_user_onenote_page_parent_section_parent_notebook(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).users_onenote_pages_parent_section_parent_notebook
def cf_user_onenote_page_parent_section_parent_notebook_section_group(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).users_onenote_pages_parent_section_parent_notebook_section_groups
def cf_user_onenote_page_parent_section_parent_section_group(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).users_onenote_pages_parent_section_parent_section_group
def cf_user_onenote_page_parent_section_parent_section_group_parent_notebook(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).users_onenote_pages_parent_section_parent_section_group_parent_notebook
def cf_user_onenote_section_group(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).users_onenote_section_groups
def cf_user_onenote_section_group_parent_notebook(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).users_onenote_section_groups_parent_notebook
def cf_user_onenote_section_group_parent_notebook_section(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).users_onenote_section_groups_parent_notebook_sections
def cf_user_onenote_section_group_parent_notebook_section_page(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).users_onenote_section_groups_parent_notebook_sections_pages
def cf_user_onenote_section_group_section(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).users_onenote_section_groups_sections
def cf_user_onenote_section_group_section_page(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).users_onenote_section_groups_sections_pages
def cf_user_onenote_section_group_section_page_parent_notebook(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).users_onenote_section_groups_sections_pages_parent_notebook
def cf_user_onenote_section_group_section_parent_notebook(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).users_onenote_section_groups_sections_parent_notebook
def cf_user_onenote_section(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).users_onenote_sections
def cf_user_onenote_section_page(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).users_onenote_sections_pages
def cf_user_onenote_section_page_parent_notebook(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).users_onenote_sections_pages_parent_notebook
def cf_user_onenote_section_page_parent_notebook_section_group(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).users_onenote_sections_pages_parent_notebook_section_groups
def cf_user_onenote_section_parent_notebook(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).users_onenote_sections_parent_notebook
def cf_user_onenote_section_parent_notebook_section_group(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).users_onenote_sections_parent_notebook_section_groups
def cf_user_onenote_section_parent_section_group(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).users_onenote_sections_parent_section_group
def cf_user_onenote_section_parent_section_group_parent_notebook(cli_ctx, *_):
    return cf_notes_cl(cli_ctx).users_onenote_sections_parent_section_group_parent_notebook
| 35.878049
| 104
| 0.850009
| 2,455
| 16,181
| 4.894094
| 0.030957
| 0.108864
| 0.081648
| 0.108864
| 0.963129
| 0.958718
| 0.956388
| 0.948231
| 0.917187
| 0.867749
| 0
| 0
| 0.0851
| 16,181
| 450
| 105
| 35.957778
| 0.811605
| 0.027131
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.488789
| false
| 0
| 0.008969
| 0.484305
| 0.986547
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 8
|
910513912fd130bf9a44c4fcd707e46988a79bd7
| 129
|
py
|
Python
|
src/browserist/factory/__init__.py
|
jakob-bagterp/browserist
|
76bd916dd217b7da3759fd6ec3374191002dc091
|
[
"Apache-2.0"
] | 2
|
2022-02-20T10:03:19.000Z
|
2022-03-22T11:17:10.000Z
|
src/browserist/factory/__init__.py
|
jakob-bagterp/browserist
|
76bd916dd217b7da3759fd6ec3374191002dc091
|
[
"Apache-2.0"
] | null | null | null |
src/browserist/factory/__init__.py
|
jakob-bagterp/browserist
|
76bd916dd217b7da3759fd6ec3374191002dc091
|
[
"Apache-2.0"
] | null | null | null |
# Public API of this package: each entry names a submodule that is imported
# below and re-exported. NOTE(review): "get" and "set" shadow Python builtins
# when star-imported by consumers — presumably a deliberate naming scheme for
# the package's fluent API; confirm before renaming.
__all__ = ["chromium", "get", "internet_explorer", "safari", "set"]
from . import chromium, get, internet_explorer, safari, set
| 32.25
| 67
| 0.705426
| 15
| 129
| 5.666667
| 0.6
| 0.258824
| 0.447059
| 0.635294
| 0.847059
| 0.847059
| 0
| 0
| 0
| 0
| 0
| 0
| 0.124031
| 129
| 3
| 68
| 43
| 0.752212
| 0
| 0
| 0
| 0
| 0
| 0.286822
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.